[ 533.722451] env[69475]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=69475) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 533.722769] env[69475]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=69475) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 533.722821] env[69475]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=69475) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 533.723176] env[69475]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 533.820698] env[69475]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=69475) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 533.830634] env[69475]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=69475) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 533.873140] env[69475]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 534.432529] env[69475]: INFO nova.virt.driver [None req-4a8595ff-f365-4602-9c95-3c80e1095c93 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 534.504328] env[69475]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 534.504493] env[69475]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 534.504589] env[69475]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=69475) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 537.688107] env[69475]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-77e3d642-9cff-4c32-ae57-73dcae777c2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.703900] env[69475]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=69475) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 537.704038] env[69475]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-bb39a8ee-cc2d-4ebd-8844-1f4ce6c023d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.737473] env[69475]: INFO oslo_vmware.api [-] Successfully established new session; session ID is d8525.
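The two oslo_concurrency.processutils lines above ([ 533.820698] and [ 533.830634]) are the standard trace for a helper command: the wrapper logs the command before it runs, then logs the exit code and wall-clock time after. A minimal sketch of the same call, assuming oslo.concurrency is installed; the grep arguments simply mirror the log and are only an example:

    # Sketch of the processutils call behind the "Running cmd"/"CMD ... returned" lines.
    # Assumes oslo.concurrency; the grep target mirrors the log above.
    from oslo_concurrency import processutils

    # Logs 'Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm',
    # runs it, then logs 'CMD "..." returned: <rc> in <seconds>s' at DEBUG.
    stdout, stderr = processutils.execute(
        'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
        check_exit_code=[0, 1])  # grep exits 1 on "no match"; accept both codes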
[ 537.737617] env[69475]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.233s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 537.738213] env[69475]: INFO nova.virt.vmwareapi.driver [None req-4a8595ff-f365-4602-9c95-3c80e1095c93 None None] VMware vCenter version: 7.0.3
[ 537.741765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d05c015-00cc-4c06-862c-f9b97a9edecc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.758825] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa105923-5d0f-49e9-8e8e-64def155a704 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.764836] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c81c5c74-985c-4078-8b1c-bc199f0f59d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.771440] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14bd220-91d2-468a-ae53-7f19626a263b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.784351] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270029fd-7be6-4b2c-8e9c-0b15bd5961bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.790161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f79355e-d1e4-4a12-b3ad-6d2180660a7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.820721] env[69475]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-fa2dbb34-dbab-4f5d-8136-42c532e213f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 537.825620] env[69475]: DEBUG nova.virt.vmwareapi.driver [None req-4a8595ff-f365-4602-9c95-3c80e1095c93 None None] Extension org.openstack.compute already exists. {{(pid=69475) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 537.828256] env[69475]: INFO nova.compute.provider_config [None req-4a8595ff-f365-4602-9c95-3c80e1095c93 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
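The oslo_vmware_api_lock messages (acquiring at [ 534.504328], acquired after 0.000s, released at [ 537.737617] after 3.233s) bracket the entire vCenter login, so the 3.233s hold time is effectively the session-creation latency. They come from oslo.concurrency's lock wrapper. A minimal sketch of the pattern, assuming oslo.concurrency; the class and method body here are placeholders, not the actual oslo.vmware implementation:

    # Sketch of the locking pattern that produces the Acquiring/acquired/released lines.
    # Assumes oslo.concurrency; _create_session below is a stand-in, not oslo.vmware code.
    from oslo_concurrency import lockutils


    class SessionSketch:
        @lockutils.synchronized('oslo_vmware_api_lock')
        def _create_session(self):
            # Runs while "oslo_vmware_api_lock" is held; the DEBUG lines above
            # report the wait time (0.000s) and the hold time (3.233s).
            pass  # placeholder for building the SOAP client and calling SessionManager.Login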
[ 538.331987] env[69475]: DEBUG nova.context [None req-4a8595ff-f365-4602-9c95-3c80e1095c93 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),5948857a-efe9-4666-84a3-f6cafd451eb4(cell1) {{(pid=69475) load_cells /opt/stack/nova/nova/context.py:464}}
[ 538.334073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 538.334314] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 538.335038] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 538.335465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Acquiring lock "5948857a-efe9-4666-84a3-f6cafd451eb4" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 538.335651] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Lock "5948857a-efe9-4666-84a3-f6cafd451eb4" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 538.336674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Lock "5948857a-efe9-4666-84a3-f6cafd451eb4" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 538.357758] env[69475]: INFO dbcounter [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Registered counter for database nova_cell0
[ 538.365566] env[69475]: INFO dbcounter [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Registered counter for database nova_cell1
[ 538.368736] env[69475]: DEBUG oslo_db.sqlalchemy.engines [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69475) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 538.369101] env[69475]: DEBUG oslo_db.sqlalchemy.engines [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=69475) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 538.373841] env[69475]: ERROR nova.db.main.api [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 538.373841] env[69475]: result = function(*args, **kwargs)
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 538.373841] env[69475]: return func(*args, **kwargs)
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 538.373841] env[69475]: result = fn(*args, **kwargs)
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 538.373841] env[69475]: return f(*args, **kwargs)
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 538.373841] env[69475]: return db.service_get_minimum_version(context, binaries)
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 538.373841] env[69475]: _check_db_access()
[ 538.373841] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 538.373841] env[69475]: stacktrace = ''.join(traceback.format_stack())
[ 538.373841] env[69475]:
[ 538.374545] env[69475]: ERROR nova.db.main.api [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 538.374545] env[69475]: result = function(*args, **kwargs)
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 538.374545] env[69475]: return func(*args, **kwargs)
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 538.374545] env[69475]: result = fn(*args, **kwargs)
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 538.374545] env[69475]: return f(*args, **kwargs)
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/objects/service.py", line 557, in _db_service_get_minimum_version
[ 538.374545] env[69475]: return db.service_get_minimum_version(context, binaries)
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 538.374545] env[69475]: _check_db_access()
[ 538.374545] env[69475]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 538.374545] env[69475]: stacktrace = ''.join(traceback.format_stack())
[ 538.374545] env[69475]:
[ 538.374928] env[69475]: WARNING nova.objects.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Failed to get minimum service version for cell 5948857a-efe9-4666-84a3-f6cafd451eb4
[ 538.375063] env[69475]: WARNING nova.objects.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 538.375473] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Acquiring lock "singleton_lock" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 538.375632] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Acquired lock "singleton_lock" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}}
[
538.375869] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Releasing lock "singleton_lock" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 538.376208] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Full set of CONF: {{(pid=69475) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 538.376353] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ******************************************************************************** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 538.376481] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] Configuration options gathered from: {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 538.376620] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 538.376811] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 538.376936] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ================================================================================ {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 538.377158] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] allow_resize_to_same_host = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.377327] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] arq_binding_timeout = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.377455] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] backdoor_port = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.377579] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] backdoor_socket = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.377739] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] block_device_allocate_retries = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.377897] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] block_device_allocate_retries_interval = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378073] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cert = self.pem {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378241] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378409] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute_monitors = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378621] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] config_dir = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378744] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] config_drive_format = iso9660 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.378879] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379054] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] config_source = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379228] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] console_host = devstack {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379395] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] control_exchange = nova {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379557] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cpu_allocation_ratio = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379721] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] daemon = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.379890] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] debug = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380060] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_access_ip_network_name = None {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380231] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_availability_zone = nova {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380389] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_ephemeral_format = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380548] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_green_pool_size = 1000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380788] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.380961] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] default_schedule_zone = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.381201] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] disk_allocation_ratio = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.381376] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] enable_new_services = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.381558] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] enabled_apis = ['osapi_compute'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.381726] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] enabled_ssl_apis = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.381887] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] flat_injected = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382066] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] force_config_drive = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382231] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] force_raw_images = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382399] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] graceful_shutdown_timeout = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382559] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] heal_instance_info_cache_interval = -1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382778] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] host = cpu-1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.382955] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] initial_cpu_allocation_ratio = 4.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383130] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] initial_disk_allocation_ratio = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383292] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] initial_ram_allocation_ratio = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383504] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383667] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_build_timeout = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383828] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_delete_interval = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.383994] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_format = [instance: %(uuid)s] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.384180] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_name_template = instance-%08x {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.384342] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_usage_audit = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.384509] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_usage_audit_period = month {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.384672] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.384840] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] instances_path = /opt/stack/data/nova/instances {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385010] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] internal_service_availability_zone = internal {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385175] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] key = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385366] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] live_migration_retry_count = 30 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385537] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_color = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385699] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_config_append = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.385864] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386034] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_dir = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386197] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386324] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_options = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386484] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_rotate_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386652] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_rotate_interval_type = days {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386816] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] log_rotation_type = none {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.386943] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387082] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387247] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387410] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387535] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387697] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] long_rpc_timeout = 1800 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.387859] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_concurrent_builds = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388027] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_concurrent_live_migrations = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388190] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_concurrent_snapshots = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388346] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_local_block_devices = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388503] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_logfile_count = 30 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388665] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] max_logfile_size_mb = 200 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388822] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] maximum_instance_delete_attempts = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.388988] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metadata_listen = 0.0.0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389167] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metadata_listen_port = 8775 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389334] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metadata_workers = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389494] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] migrate_max_retries = -1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389660] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] mkisofs_cmd = genisoimage {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389862] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] my_block_storage_ip = 10.180.1.21 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.389994] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] my_ip = 10.180.1.21 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.390213] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.390376] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] network_allocate_retries = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.390555] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.390722] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] osapi_compute_listen = 0.0.0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.390907] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] osapi_compute_listen_port = 8774 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391065] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] osapi_compute_unique_server_name_scope = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391242] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] osapi_compute_workers = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391404] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] password_length = 12 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391565] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] periodic_enable = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391725] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] periodic_fuzzy_delay = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.391893] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] pointer_model = usbtablet {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392073] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] preallocate_images = none {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] publish_errors = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392365] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] pybasedir = /opt/stack/nova {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392522] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ram_allocation_ratio = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392686] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rate_limit_burst = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.392852] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rate_limit_except_level = CRITICAL {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393015] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rate_limit_interval = 0 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393180] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reboot_timeout = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393339] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reclaim_instance_interval = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393494] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] record = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393661] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reimage_timeout_per_gb = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393826] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] report_interval = 120 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.393983] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rescue_timeout = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394155] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reserved_host_cpus = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394314] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reserved_host_disk_mb = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394472] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reserved_host_memory_mb = 512 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394631] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] reserved_huge_pages = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394792] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] resize_confirm_window = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.394950] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] resize_fs_using_block_device = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395124] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] resume_guests_state_on_host_boot = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395316] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395487] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] rpc_response_timeout = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395648] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] run_external_periodic_tasks = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395818] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] running_deleted_instance_action = reap {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.395976] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] running_deleted_instance_poll_interval = 1800 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396147] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] running_deleted_instance_timeout = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396307] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler_instance_sync_interval = 120 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396476] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_down_time = 720 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396642] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] servicegroup_driver = db {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396799] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] shell_completion = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.396960] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] shelved_offload_time = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.397131] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] shelved_poll_interval = 3600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.397300] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] shutdown_timeout = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.397459] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] source_is_ipv6 = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.397618] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ssl_only = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.397862] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398049] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] sync_power_state_interval = 600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398218] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] sync_power_state_pool_size = 1000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398387] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] syslog_log_facility = LOG_USER {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398544] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] tempdir = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398708] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] timeout_nbd = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.398876] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] transport_url = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399055] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] update_resources_interval = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399224] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] use_cow_images = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399384] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] use_journal = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399544] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] use_json = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399702] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] use_rootwrap_daemon = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.399856] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] 
use_stderr = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400022] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] use_syslog = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400182] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vcpu_pin_set = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400348] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plugging_is_fatal = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400515] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plugging_timeout = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400678] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] virt_mkfs = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400838] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] volume_usage_poll_interval = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.400996] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] watch_log_file = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.401178] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] web = /usr/share/spice-html5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 538.401368] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.401535] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.401697] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_brick.wait_mpath_device_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.401868] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_concurrency.disable_process_locking = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.402428] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.402624] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.402798] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.402972] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_metrics.metrics_process_name = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.403163] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.403335] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.403520] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.auth_strategy = keystone {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.403690] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.compute_link_prefix = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.403870] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404061] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.dhcp_domain = novalocal {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404241] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.enable_instance_password = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404410] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.glance_link_prefix = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404580] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404756] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.instance_list_cells_batch_strategy = distributed {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.404923] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.instance_list_per_project_cells = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405102] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.list_records_by_skipping_down_cells = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405273] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.local_metadata_per_cell = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405447] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.max_limit = 1000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405615] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.metadata_cache_expiration = 15 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405792] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.neutron_default_tenant_id = default {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.405962] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.response_validation = warn {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.406157] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.use_neutron_default_nets = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.406330] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.406495] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_dynamic_failure_fatal = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.406667] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.406842] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_dynamic_ssl_certfile = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407017] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_dynamic_targets = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407187] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_jsonfile_path = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407368] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api.vendordata_providers = ['StaticJSON'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407563] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.backend = dogpile.cache.memcached {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407733] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.backend_argument = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.407898] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.backend_expiration_time = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408083] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.config_prefix = cache.oslo {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408259] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.dead_timeout = 60.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408425] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.debug_cache_backend = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408603] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.enable_retry_client = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408761] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.enable_socket_keepalive = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.408932] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.enabled = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409145] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.enforce_fips_mode = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409320] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.expiration_time = 600 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409484] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.hashclient_retry_attempts = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409650] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.hashclient_retry_delay = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409813] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_dead_retry = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.409972] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_password = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.410150] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.410314] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.410480] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_pool_maxsize = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.410655] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_pool_unused_timeout = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.410822] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_sasl_enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411006] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_servers = ['localhost:11211'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411187] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_socket_timeout = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411350] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.memcache_username = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411517] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.proxies = [] {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411680] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_db = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.411841] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_password = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412013] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_sentinel_service_name = mymaster {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412196] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412368] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_server = localhost:6379 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412533] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_socket_timeout = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412694] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.redis_username = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.412858] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.retry_attempts = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413037] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.retry_delay = 0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413208] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.socket_keepalive_count = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413372] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.socket_keepalive_idle = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413537] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.socket_keepalive_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413696] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.tls_allowed_ciphers = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.413854] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.tls_cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414025] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.tls_certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414195] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.tls_enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414353] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cache.tls_keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414525] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414699] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.auth_type = password {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.414864] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415050] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.catalog_info = volumev3::publicURL {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415218] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415384] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415549] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.cross_az_attach = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415714] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.debug = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.415878] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.endpoint_template = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416055] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.http_retries = 3 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416223] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416382] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416555] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.os_region_name = RegionOne {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416721] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.416883] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cinder.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417066] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417231] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.cpu_dedicated_set = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417391] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.cpu_shared_set = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417556] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.image_type_exclude_list = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417729] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.iothreads_usage_for_instances = none {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.417894] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.live_migration_wait_for_vif_plug = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418067] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.max_concurrent_disk_ops = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418232] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.max_disk_devices_to_attach = -1 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418394] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418564] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418734] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.resource_provider_association_refresh = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.418901] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419075] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.shutdown_retry_interval = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419263] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419443] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] conductor.workers = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419621] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] console.allowed_origins = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419780] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] console.ssl_ciphers = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.419949] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] console.ssl_minimum_version = default {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420134] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] consoleauth.enforce_session_timeout = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420304] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] consoleauth.token_ttl = 600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420473] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.cafile = None 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420630] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420796] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.420955] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421128] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421288] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421450] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421615] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421769] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.421926] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422095] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422257] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422416] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422588] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.service_type = accelerator {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422750] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.422906] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423071] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423232] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423409] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423569] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] cyborg.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423739] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.asyncio_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.423897] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.asyncio_slave_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424074] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.backend = sqlalchemy {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424246] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424411] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.connection_debug = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424577] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.connection_parameters = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424741] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.connection_recycle_time = 3600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.424905] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] 
database.connection_trace = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425078] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.db_inc_retry_interval = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425245] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.db_max_retries = 20 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425407] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.db_max_retry_interval = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425568] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.db_retry_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425733] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.max_overflow = 50 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.425893] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.max_pool_size = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426067] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.max_retries = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426241] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.mysql_sql_mode = TRADITIONAL {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426401] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.mysql_wsrep_sync_wait = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426561] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.pool_timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426721] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.retry_interval = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.426883] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.slave_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427052] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.sqlite_synchronous = True {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427216] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] database.use_db_reconnect = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427383] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.asyncio_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427542] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.asyncio_slave_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427712] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.backend = sqlalchemy {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.427882] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428063] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.connection_debug = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428234] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.connection_parameters = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428397] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.connection_recycle_time = 3600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428563] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.connection_trace = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428730] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.db_inc_retry_interval = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.428897] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.db_max_retries = 20 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429071] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.db_max_retry_interval = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.db_retry_interval = 1 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429398] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.max_overflow = 50 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429566] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.max_pool_size = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429740] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.max_retries = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.429908] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430136] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.mysql_wsrep_sync_wait = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.pool_timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430398] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.retry_interval = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430554] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.slave_connection = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430715] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] api_database.sqlite_synchronous = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.430891] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] devices.enabled_mdev_types = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431081] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431255] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ephemeral_storage_encryption.default_format = luks {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431416] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ephemeral_storage_encryption.enabled = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431577] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ephemeral_storage_encryption.key_size = 512 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431747] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.api_servers = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.431909] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432077] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432240] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432396] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432554] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432715] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.debug = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.432877] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.default_trusted_certificate_ids = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433049] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.enable_certificate_validation = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433213] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.enable_rbd_download = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433369] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433533] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433690] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.433848] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434012] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434180] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.num_retries = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434349] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.rbd_ceph_conf = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434512] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.rbd_connect_timeout = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434681] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.rbd_pool = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.434857] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.rbd_user = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435026] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435190] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435347] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435516] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.service_type = image {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435679] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.435843] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.status_code_retries = None {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436008] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436177] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436358] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436522] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.verify_glance_signatures = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436684] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] glance.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.436850] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] guestfs.debug = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437029] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437196] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.auth_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437353] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437510] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437672] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437833] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.437989] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438157] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438317] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438471] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438644] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438796] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.438950] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439117] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439275] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439441] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.service_type = shared-file-system {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439604] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.share_apply_policy_timeout = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439766] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.439922] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.440090] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.440250] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.timeout = None 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.440429] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.440588] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] manila.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.440755] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] mks.enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441117] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441307] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.manager_interval = 2400 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441476] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.precache_concurrency = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441645] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.remove_unused_base_images = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441817] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.441984] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442176] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] image_cache.subdirectory_name = _base {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442353] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.api_max_retries = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442517] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.api_retry_interval = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442676] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.auth_section = None {{(pid=69475) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442839] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.auth_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.442998] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.443170] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.443335] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.443497] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.conductor_group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444223] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444223] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444223] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444223] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444401] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444435] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444576] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444737] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.peer_list = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.444892] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445064] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445230] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.serial_console_state_timeout = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445386] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445553] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.service_type = baremetal {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445710] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.shard = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.445873] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446038] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446201] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446357] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446532] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446695] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ironic.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.446877] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447060] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] key_manager.fixed_key = **** 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447245] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447408] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.barbican_api_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447566] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.barbican_endpoint = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447735] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.barbican_endpoint_type = public {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.447895] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.barbican_region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448086] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448252] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448416] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448600] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448737] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.448898] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.number_of_retries = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449069] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.retry_delay = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.send_service_user_token = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449399] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449556] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449716] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.verify_ssl = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.449874] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican.verify_ssl_path = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450050] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450216] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.auth_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450373] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450527] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450688] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.450850] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451012] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451178] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451336] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] barbican_service_user.timeout = None {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451498] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.approle_role_id = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451655] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.approle_secret_id = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451823] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.kv_mountpoint = secret {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.451983] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.kv_path = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452159] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.kv_version = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452318] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.namespace = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452476] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.root_token_id = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452632] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.ssl_ca_crt_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452800] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.timeout = 60.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.452963] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.use_ssl = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453144] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453313] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453469] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453630] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453789] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.453947] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454121] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454581] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454581] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454581] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454719] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.454880] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455048] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455206] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455373] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.service_type = identity {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455532] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455687] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.status_code_retries = None {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.455845] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.456006] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.456190] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.456348] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] keystone.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.456534] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.ceph_mount_options = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.456835] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457024] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.connection_uri = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457193] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_mode = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457360] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_model_extra_flags = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457525] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_models = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457695] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_power_governor_high = performance {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.457862] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_power_governor_low = powersave {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458054] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_power_management = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458243] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458413] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.device_detach_attempts = 8 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458596] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.device_detach_timeout = 20 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458738] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.disk_cachemodes = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.458896] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.disk_prefix = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459070] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.enabled_perf_events = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459235] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.file_backed_memory = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459400] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.gid_maps = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459558] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.hw_disk_discard = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459717] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.hw_machine_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.459881] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_rbd_ceph_conf = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.460054] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.460221] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} 
[ 538.460391] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_rbd_glance_store_name = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.460560] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_rbd_pool = rbd {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.460727] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_type = default {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.460888] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.images_volume_group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461060] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.inject_key = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461225] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.inject_partition = -2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461384] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.inject_password = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461543] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.iscsi_iface = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461748] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.iser_use_multipath = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.461861] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_bandwidth = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462032] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_completion_timeout = 800 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462198] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_downtime = 500 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462359] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_downtime_delay = 75 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462517] env[69475]: DEBUG oslo_service.backend.eventlet.service 
[None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_downtime_steps = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462675] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_inbound_addr = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462837] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_permit_auto_converge = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.462997] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_permit_post_copy = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463169] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_scheme = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463338] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_timeout_action = abort {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463498] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_tunnelled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463656] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_uri = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463817] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.live_migration_with_native_tls = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.463973] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.max_queues = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.464146] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.mem_stats_period_seconds = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.464375] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.464538] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.nfs_mount_options = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.464821] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.464997] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_aoe_discover_tries = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465175] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_iser_scan_tries = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465337] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_memory_encrypted_guests = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465500] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_nvme_discover_tries = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465662] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_pcie_ports = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465831] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.num_volume_scan_tries = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.465994] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.pmem_namespaces = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.466168] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.quobyte_client_cfg = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.466457] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.466633] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rbd_connect_timeout = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.466807] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.466959] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467197] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rbd_secret_uuid = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467290] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rbd_user = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467449] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.realtime_scheduler_priority = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467621] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.remote_filesystem_transport = ssh {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467781] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rescue_image_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.467937] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rescue_kernel_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.468106] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rescue_ramdisk_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.468279] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rng_dev_path = /dev/urandom {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.468438] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.rx_queue_size = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.468613] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.smbfs_mount_options = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.468898] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469087] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.snapshot_compression = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469257] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.snapshot_image_format = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469483] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469651] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.sparse_logical_volumes = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469816] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.swtpm_enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.469987] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.swtpm_group = tss {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470170] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.swtpm_user = tss {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470341] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.sysinfo_serial = unique {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470501] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.tb_cache_size = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470659] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.tx_queue_size = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470825] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.uid_maps = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.470989] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.use_virtio_for_bridges = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.471174] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.virt_type = kvm {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.471344] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.volume_clear = zero {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.471508] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.volume_clear_size = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.471673] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.volume_enforce_multipath = False 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.471848] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.volume_use_multipath = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472012] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_cache_path = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472186] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472354] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_mount_group = qemu {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472518] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_mount_opts = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472686] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.472972] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.473167] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.vzstorage_mount_user = stack {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.473336] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.473509] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.473683] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.auth_type = password {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.473845] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474009] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.certfile = None {{(pid=69475) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474181] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474339] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474495] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474665] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.default_floating_pool = public {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474828] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.474987] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.extension_sync_interval = 600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475170] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.http_retries = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475334] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475495] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475653] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475824] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.metadata_proxy_shared_secret = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.475983] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476167] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.ovs_bridge = br-int {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476332] env[69475]: 
DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.physnets = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476503] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.region_name = RegionOne {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476664] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476835] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.service_metadata_proxy = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.476995] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477178] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.service_type = network {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477516] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477516] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477653] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477816] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.477994] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.478168] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] neutron.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.478347] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] notifications.bdms_in_notifications = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.478524] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd 
None None] notifications.default_level = INFO {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.478699] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] notifications.include_share_mapping = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.478870] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] notifications.notification_format = unversioned {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479047] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] notifications.notify_on_state_change = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479227] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479404] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] pci.alias = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479576] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] pci.device_spec = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479743] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] pci.report_in_placement = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.479911] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480095] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.auth_type = password {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480268] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.auth_url = http://10.180.1.21/identity {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480428] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480584] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480748] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.collect_timing = False 
{{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.480906] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481074] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.default_domain_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481393] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.default_domain_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481549] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.domain_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481706] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.domain_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.481862] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482030] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482189] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482347] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482502] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482670] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.password = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482828] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.project_domain_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.482994] 
env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.project_domain_name = Default {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483175] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.project_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483346] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.project_name = service {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483512] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.region_name = RegionOne {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483675] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483833] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.483999] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.service_type = placement {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484177] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484334] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484490] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484647] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.system_scope = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484804] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.484959] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.trust_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485128] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.user_domain_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485297] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.user_domain_name = Default {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485454] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.user_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485625] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.username = nova {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485807] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.485968] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] placement.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486165] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.cores = 20 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486331] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.count_usage_from_placement = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486502] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486668] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.injected_file_content_bytes = 10240 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486832] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.injected_file_path_length = 255 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.486996] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.injected_files = 5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.487174] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.instances = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.487341] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.key_pairs = 100 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.487504] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.metadata_items = 128 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.487669] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.ram = 51200 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.487834] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.recheck_quota = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488009] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.server_group_members = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488181] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.server_groups = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488392] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.unified_limits_resource_list = ['servers'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488603] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] quota.unified_limits_resource_strategy = require {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488747] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.488912] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489088] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.image_metadata_prefilter = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489254] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489420] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.max_attempts = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489584] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.max_placement_results = 1000 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489750] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.489911] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.query_placement_for_image_type_support = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490082] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490261] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] scheduler.workers = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490449] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490623] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490803] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.490970] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.491150] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.491316] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.491479] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.491669] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 
'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.491840] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.host_subset_size = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492012] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492179] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.image_properties_default_architecture = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492340] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492514] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.image_props_weight_setting = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492678] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492839] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.isolated_hosts = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.492998] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.isolated_images = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493172] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.max_instances_per_host = 50 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493330] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493490] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493650] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.pci_in_placement = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493811] 
env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.493969] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494142] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494299] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494459] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494617] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494776] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.track_instance_changes = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.494947] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.495130] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metrics.required = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.495297] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metrics.weight_multiplier = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.495461] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metrics.weight_of_unavailable = -10000.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.495625] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] metrics.weight_setting = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.495934] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=69475) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.496123] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.496303] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.port_range = 10000:20000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.496475] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.496646] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.496814] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] serial_console.serialproxy_port = 6083 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497208] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497208] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.auth_type = password {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497308] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497459] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497621] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497779] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.497933] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.498114] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.send_service_user_token = True {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.498280] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.498437] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] service_user.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.498639] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.agent_enabled = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.498776] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499081] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499292] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.html5proxy_host = 0.0.0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499463] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.html5proxy_port = 6082 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499625] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.image_compression = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499785] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.jpeg_compression = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.499943] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.playback_compression = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.500116] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.require_secure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.500287] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.server_listen = 127.0.0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.500455] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
538.500731] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.500900] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.streaming_mode = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501072] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] spice.zlib_compression = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501242] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] upgrade_levels.baseapi = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501411] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] upgrade_levels.compute = auto {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501567] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] upgrade_levels.conductor = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501724] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] upgrade_levels.scheduler = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.501886] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502055] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.auth_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502215] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502372] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502534] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502694] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.502851] env[69475]: 
DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503012] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503174] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vendordata_dynamic_auth.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503345] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.api_retry_count = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503502] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.ca_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503671] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.cache_prefix = devstack-image-cache {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503838] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.cluster_name = testcl1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.503999] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.connection_pool_size = 10 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.504168] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.console_delay_seconds = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.504335] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.datastore_regex = ^datastore.* {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.504541] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.504715] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.host_password = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.504879] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.host_port = 443 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505059] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.host_username = administrator@vsphere.local {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505231] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.insecure = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505392] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.integration_bridge = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505556] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.maximum_objects = 100 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505713] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.pbm_default_policy = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.505873] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.pbm_enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506037] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.pbm_wsdl_location = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506207] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506366] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.serial_port_proxy_uri = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506523] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.serial_port_service_uri = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506690] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.task_poll_interval = 0.5 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.506861] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.use_linked_clone = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.507040] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.vnc_keymap = en-us {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.507210] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.vnc_port = 5900 {{(pid=69475) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.507373] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vmware.vnc_port_total = 10000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.507558] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.auth_schemes = ['none'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.507731] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508030] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508221] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508390] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.novncproxy_port = 6080 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508631] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.server_listen = 127.0.0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508792] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.508952] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.vencrypt_ca_certs = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509129] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.vencrypt_client_cert = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509288] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vnc.vencrypt_client_key = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509461] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509630] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_deep_image_inspection = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509788] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_fallback_pcpu_query = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.509951] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_group_policy_check_upcall = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510125] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510287] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.disable_rootwrap = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510449] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.enable_numa_live_migration = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510609] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510769] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.510929] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.handle_virt_lifecycle_events = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511097] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.libvirt_disable_apic = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511260] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.never_download_image_if_on_rbd = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511420] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511581] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511744] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.511902] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512071] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512234] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512391] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512558] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512725] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.512909] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513093] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.client_socket_timeout = 900 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513262] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.default_pool_size = 1000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513425] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.keep_alive = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513588] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.max_header_line = 16384 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513751] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.secure_proxy_ssl_header = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.513908] env[69475]: 
DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.ssl_ca_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514075] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.ssl_cert_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514238] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.ssl_key_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514404] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.tcp_keepidle = 600 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514586] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514756] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] zvm.ca_file = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.514915] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] zvm.cloud_connector_url = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.515206] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.515383] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] zvm.reachable_timeout = 300 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.515555] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.515731] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.515907] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.connection_string = messaging:// {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.516085] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.enabled = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
538.516255] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.es_doc_type = notification {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.516418] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.es_scroll_size = 10000 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.516586] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.es_scroll_time = 2m {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.516750] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.filter_error_trace = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.516916] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.hmac_keys = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517094] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.sentinel_service_name = mymaster {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517264] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.socket_timeout = 0.1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517430] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.trace_requests = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517590] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler.trace_sqlalchemy = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517780] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler_jaeger.process_tags = {} {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.517938] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler_jaeger.service_name_prefix = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518111] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] profiler_otlp.service_name_prefix = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518282] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] remote_debug.host = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518440] env[69475]: DEBUG oslo_service.backend.eventlet.service [None 
req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] remote_debug.port = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518619] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518777] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.518936] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519107] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519268] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519424] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519588] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519742] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.519899] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520077] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.hostname = devstack {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520239] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520407] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520573] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520737] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.520900] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521078] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521244] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521404] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521570] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521734] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.521892] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522065] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522229] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522391] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522550] env[69475]: DEBUG 
oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522709] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.522869] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523037] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523200] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523362] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523520] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523688] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.523854] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524026] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524199] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524365] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.ssl_version = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524528] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524711] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.524883] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_notifications.retry = -1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525066] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525244] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_messaging_notifications.transport_url = **** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525421] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.auth_section = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525583] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.auth_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525744] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.cafile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.525901] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.certfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526073] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.collect_timing = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526236] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.connect_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526394] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.connect_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526551] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_id = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526720] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_interface = publicURL {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.526877] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_override = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527042] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527204] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527361] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.endpoint_service_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527521] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.insecure = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527676] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.keyfile = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527833] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.max_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.527988] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.min_version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528156] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.region_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528314] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.retriable_status_codes = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528470] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.service_name = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528631] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.service_type = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528788] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.split_loggers = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.528943] 
env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.status_code_retries = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529117] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.status_code_retry_delay = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529277] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.timeout = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529431] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.valid_interfaces = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529593] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_limit.version = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529767] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_reports.file_event_handler = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.529928] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_reports.file_event_handler_interval = 1 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530103] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] oslo_reports.log_dir = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530277] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530436] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530591] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530757] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.530917] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=69475) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531088] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_linux_bridge_privileged.user = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531259] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531416] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531572] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.helper_command = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531738] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.531899] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532066] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] vif_plug_ovs_privileged.user = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532238] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.flat_interface = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532416] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532587] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532758] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.532926] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533109] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] 
os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533277] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533438] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_linux_bridge.vlan_interface = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533614] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533784] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.isolate_vif = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.533951] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534128] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534299] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534474] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.ovsdb_interface = native {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534637] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] os_vif_ovs.per_port_bridge = False {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534808] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.capabilities = [21] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.534967] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535143] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.helper_command = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535307] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.logger_name = 
os_brick.privileged {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535468] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.thread_pool_size = 8 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535624] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] privsep_osbrick.user = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535798] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.535954] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.group = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.536123] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.helper_command = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.536290] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.536450] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.thread_pool_size = 8 {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.536606] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] nova_sys_admin.user = None {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 538.536737] env[69475]: DEBUG oslo_service.backend.eventlet.service [None req-3c41d335-4029-420d-beeb-e15a9ec48abd None None] ******************************************************************************** {{(pid=69475) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 538.537151] env[69475]: INFO nova.service [-] Starting compute node (version 31.0.1) [ 539.041066] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Getting list of instances from cluster (obj){ [ 539.041066] env[69475]: value = "domain-c8" [ 539.041066] env[69475]: _type = "ClusterComputeResource" [ 539.041066] env[69475]: } {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 539.042131] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41186d1c-8650-4310-82ce-b4fc5ede3432 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.051135] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Got total of 0 instances {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 
539.051724] env[69475]: WARNING nova.virt.vmwareapi.driver [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 539.052209] env[69475]: INFO nova.virt.node [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Generated node identity dd221100-68c1-4a75-92b5-b24d81fee5da [ 539.052442] env[69475]: INFO nova.virt.node [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Wrote node identity dd221100-68c1-4a75-92b5-b24d81fee5da to /opt/stack/data/n-cpu-1/compute_id [ 539.555464] env[69475]: WARNING nova.compute.manager [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Compute nodes ['dd221100-68c1-4a75-92b5-b24d81fee5da'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 540.561635] env[69475]: INFO nova.compute.manager [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 541.567323] env[69475]: WARNING nova.compute.manager [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 541.567731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 541.567823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 541.567963] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 541.568128] env[69475]: DEBUG nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 541.569465] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929b450e-18f8-49a8-9af2-9aae00c1d121 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.577699] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3eae7ee-1a54-4b4a-9ee6-b34657148162 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.591092] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8de173dd-367a-4401-aa49-0e612728f023 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.597341] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989df19f-4ede-4a80-8cb0-74dafff5da0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.625380] env[69475]: DEBUG nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180900MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 541.625474] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 541.625680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 542.128213] env[69475]: WARNING nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] No compute node record for cpu-1:dd221100-68c1-4a75-92b5-b24d81fee5da: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host dd221100-68c1-4a75-92b5-b24d81fee5da could not be found. [ 542.631918] env[69475]: INFO nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: dd221100-68c1-4a75-92b5-b24d81fee5da [ 544.140751] env[69475]: DEBUG nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 544.141146] env[69475]: DEBUG nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 544.295270] env[69475]: INFO nova.scheduler.client.report [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] [req-82a054ca-4d39-495a-83ee-48f8110b2785] Created resource provider record via placement API for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
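The inventory record created above for provider dd221100-68c1-4a75-92b5-b24d81fee5da follows the placement API's per-resource-class schema. As a minimal sketch of how such a record maps to schedulable capacity (placement treats (total - reserved) * allocation_ratio as the consumable amount, with max_unit capping any single allocation), the snippet below reuses the values from this log; effective_capacity() is an illustrative local helper, not a Nova or placement function.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 90},
}

def effective_capacity(record):
    # Consumable capacity as placement computes it: (total - reserved) * allocation_ratio.
    return (record['total'] - record['reserved']) * record['allocation_ratio']

for rc, record in inventory.items():
    print(f"{rc}: {effective_capacity(record):g} consumable, max {record['max_unit']} per allocation")

With these numbers the 4.0 VCPU allocation ratio is what lets the 48-vCPU cluster advertise 192 schedulable VCPUs, while MEMORY_MB and DISK_GB stay at their physical totals minus the 512 MB reservation.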
[ 544.313116] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d02178e-6529-47f6-9854-7adfa60170f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.321059] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f46110-8faf-48c9-8782-91d0d31bfa2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.350186] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61eaf70-d687-44cf-9e75-0ba120e3b195 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.357687] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3bc206e-66f8-449c-9e17-5a37f4f9070b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.370654] env[69475]: DEBUG nova.compute.provider_tree [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.906699] env[69475]: DEBUG nova.scheduler.client.report [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 544.906940] env[69475]: DEBUG nova.compute.provider_tree [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 0 to 1 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 544.907092] env[69475]: DEBUG nova.compute.provider_tree [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 544.954863] env[69475]: DEBUG nova.compute.provider_tree [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Updating resource 
provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 1 to 2 during operation: update_traits {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 545.459603] env[69475]: DEBUG nova.compute.resource_tracker [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 545.459875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.834s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 545.460072] env[69475]: DEBUG nova.service [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Creating RPC server for service compute {{(pid=69475) start /opt/stack/nova/nova/service.py:186}} [ 545.475641] env[69475]: DEBUG nova.service [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] Join ServiceGroup membership for this service compute {{(pid=69475) start /opt/stack/nova/nova/service.py:203}} [ 545.475861] env[69475]: DEBUG nova.servicegroup.drivers.db [None req-26b92680-71ac-4997-a2b0-cb162ebc3d58 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=69475) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 585.761621] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.761982] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.207027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "dc2614b1-95b8-4887-8ca6-efe92921c926" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.207027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.265786] env[69475]: DEBUG nova.compute.manager [None 
req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.710596] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 586.718093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.718594] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.810938] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.811258] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.813867] env[69475]: INFO nova.compute.claims [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.996094] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.996407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" 
acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.222160] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.250625] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.464528] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "4465f156-09cc-4eba-90e4-be76f3010363" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.464781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 587.502663] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.764418] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 587.967757] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 587.984362] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf857fd-2027-42af-bc39-4c25a0992345 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.993035] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b51bfe5d-d193-468b-a066-e21ff2a934e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.039674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb3971b-9f16-42ce-99d3-16f60089c0b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.048045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "48bc79bc-df56-4523-808f-a71b391062b9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.048045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 588.055361] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cf9b364-504d-4963-a45a-f3ef38f3bc5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.062755] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.072786] env[69475]: DEBUG nova.compute.provider_tree [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.495664] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.560333] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 
tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 588.577530] env[69475]: DEBUG nova.scheduler.client.report [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 589.085358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 589.086159] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 589.089956] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.090636] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.840s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 589.093087] env[69475]: INFO nova.compute.claims [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.604144] env[69475]: DEBUG nova.compute.utils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 589.606698] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 589.606698] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 590.115031] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 590.169374] env[69475]: DEBUG nova.policy [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42c54237c534486d86b3a161149fd013', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e760df406d80477a9a7c4d345093d3db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 590.261381] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee495749-ee1a-4c56-bc4d-8267fd6940fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.271899] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4d0281-4199-418d-8518-97c2b30bfae5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.309925] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25de384-3e7e-466c-88c1-63e6c67ecf89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.318592] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487e8e75-912e-4af4-8666-a17015feb472 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.339039] env[69475]: DEBUG nova.compute.provider_tree [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.477586] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_power_states {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
590.845648] env[69475]: DEBUG nova.scheduler.client.report [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 590.859926] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Successfully created port: 595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 590.981827] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Getting list of instances from cluster (obj){ [ 590.981827] env[69475]: value = "domain-c8" [ 590.981827] env[69475]: _type = "ClusterComputeResource" [ 590.981827] env[69475]: } {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 590.985115] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e17405-6099-4c11-bfc4-90288d485195 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.996308] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Got total of 0 instances {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 590.996308] env[69475]: WARNING nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] While synchronizing instance power states, found 2 instances in the database and 0 instances on the hypervisor. 
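A brief aside on the inventory payload logged in the report-client entries above: Placement schedules against an effective capacity derived from each resource class's total, reserved and allocation_ratio, i.e. capacity = (total - reserved) * allocation_ratio. The snippet below is only an illustrative sketch of that arithmetic applied to the values in this log (the dict literal and helper name are mine, not code from this service):

```python
# Illustrative only: effective capacity Placement would expose for the
# inventory reported above (VCPU 48 @ 4.0, MEMORY_MB 196590 less 512 reserved,
# DISK_GB 400 @ 1.0).
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def effective_capacity(inv):
    """Capacity scheduled against: (total - reserved) * allocation_ratio."""
    return {rc: int((v['total'] - v['reserved']) * v['allocation_ratio'])
            for rc, v in inv.items()}

print(effective_capacity(inventory))
# -> {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}
```

min_unit, max_unit and step_size from the same payload further constrain how a single allocation may consume that capacity, but they do not change the headline numbers above.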
[ 590.996433] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 91d5b0db-63a5-4290-af9b-264a5ce4cd95 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 590.998028] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid dc2614b1-95b8-4887-8ca6-efe92921c926 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 590.998028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.998028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "dc2614b1-95b8-4887-8ca6-efe92921c926" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.998028] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 590.998028] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Getting list of instances from cluster (obj){ [ 590.998028] env[69475]: value = "domain-c8" [ 590.998028] env[69475]: _type = "ClusterComputeResource" [ 590.998028] env[69475]: } {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 590.999472] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ddaaff-cb80-4a21-b799-914c918c5999 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.009039] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Got total of 0 instances {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 591.127744] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 591.168211] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.168211] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 591.168211] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 591.168660] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 591.168997] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 591.169677] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 591.170035] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 591.170326] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 591.170835] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 591.171887] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 591.172824] env[69475]: DEBUG nova.virt.hardware [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 591.174680] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce58007a-6453-4258-a744-02d2b242ee23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.185308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e0aa46-d505-4a00-853e-66ee95f60de9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.204681] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82ef3f1c-e899-4546-b4e6-cd22060bf5ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.355954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.265s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.357587] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 591.360888] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.601s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.363080] env[69475]: INFO nova.compute.claims [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 591.868039] env[69475]: DEBUG nova.compute.utils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 591.872628] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 591.872868] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 592.027949] env[69475]: DEBUG nova.policy [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9309c0eabe544a64afa0f8332a5b7abd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4238057618546babe78b7b37966652e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 592.198942] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "7be48799-ea4a-4e7f-95c2-637460596cfc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.199302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 592.373858] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 592.526085] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb86791-42c2-46cc-8953-21a77ebbe07f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.535126] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93e5b21d-af3e-4729-ae79-ca65dcd03bf3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.574684] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2bc2b2f-4e99-47f0-b508-0df382099c90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.583236] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf7cf11-1d65-4a61-9a2d-0ae3c97240e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.598831] env[69475]: DEBUG nova.compute.provider_tree [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.703903] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.856751] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Successfully created port: fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 593.102613] env[69475]: DEBUG nova.scheduler.client.report [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 593.240015] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 593.392682] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 593.423524] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.423772] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 593.423925] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 593.424120] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 593.424264] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 593.424407] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 593.424612] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 593.424824] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 593.424926] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 593.425174] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 593.425365] env[69475]: DEBUG nova.virt.hardware [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 593.426420] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27b0aa76-2ca3-4545-a3a5-a54b4cfb0092 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.436984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ca98b8-6f3b-42f2-abaf-8e5f348d4d17 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.614636] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.254s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.615997] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 593.622300] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.560s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.624050] env[69475]: INFO nova.compute.claims [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.885662] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.885930] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.886140] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.886437] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.886556] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.886714] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.886917] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 593.887150] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 593.887314] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 594.124109] env[69475]: DEBUG nova.compute.utils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 594.125528] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 594.125842] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 594.133760] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Successfully updated port: 595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 594.170342] env[69475]: DEBUG nova.policy [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '797932bbc404490095ebf23c94f9bc0c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c5e8e7c2b47143ada53b9ba98d18f697', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 594.390704] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.630492] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 594.640647] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.642641] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 594.642914] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.782200] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb0edca-d98c-4a4b-995e-b2b7c9aa463b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.792609] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb9245d-35d4-41ad-8a64-e16814e3fa0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.826994] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61747da6-c45f-40dc-af2a-9391857ea49e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.836675] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905bf30c-bb4d-4cb7-b6b6-3215f8efdb3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.853667] env[69475]: DEBUG nova.compute.provider_tree [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.916388] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Successfully created port: 77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 595.361096] env[69475]: DEBUG nova.scheduler.client.report [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.566020] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 595.598696] env[69475]: DEBUG nova.compute.manager [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Received event network-vif-plugged-595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 595.598696] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] Acquiring lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.598696] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.598997] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.598997] env[69475]: DEBUG nova.compute.manager [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] No waiting events found dispatching network-vif-plugged-595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 595.599236] env[69475]: WARNING nova.compute.manager [req-eb3755e5-fefd-4267-8140-80ff38fb2a62 req-ebadb0ea-e7f9-4307-bf94-503e86e52909 service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Received unexpected event network-vif-plugged-595d3b80-121a-4ab1-9ece-34303f1a5b18 for instance with vm_state building and task_state spawning. [ 595.646298] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 595.675580] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 595.676086] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 595.676086] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 595.676301] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 595.676449] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 595.676597] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 595.676800] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 595.676969] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 595.677272] env[69475]: DEBUG nova.virt.hardware [None 
req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 595.677460] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 595.677779] env[69475]: DEBUG nova.virt.hardware [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 595.678530] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9d63ef-0aeb-41a1-9731-1e8b4f6629a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.687849] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0feff997-4475-41ef-b94e-31fdaa6f82a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.869007] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.869214] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 595.873762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.378s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.875011] env[69475]: INFO nova.compute.claims [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.262403] env[69475]: DEBUG nova.network.neutron [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Updating instance_info_cache with network_info: [{"id": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "address": "fa:16:3e:29:a5:ff", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap595d3b80-12", "ovs_interfaceid": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.388403] env[69475]: DEBUG nova.compute.utils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.388733] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.388848] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.445380] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Successfully updated port: fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 596.571123] env[69475]: DEBUG nova.policy [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '283113edd1e8446b9be0d03750317b83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'acd5b9b57fca4334826b9846abe4354a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.766972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 596.767124] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Instance network_info: |[{"id": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "address": "fa:16:3e:29:a5:ff", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap595d3b80-12", "ovs_interfaceid": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 596.767691] 
env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:a5:ff', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '595d3b80-121a-4ab1-9ece-34303f1a5b18', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 596.785335] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.785922] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cace677c-901b-442f-b56c-2435a858cba5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.800582] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created folder: OpenStack in parent group-v4. [ 596.800791] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating folder: Project (e760df406d80477a9a7c4d345093d3db). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.801072] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c36b7e52-843d-4656-b3e3-9b9c4d19d012 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.816414] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created folder: Project (e760df406d80477a9a7c4d345093d3db) in parent group-v700823. [ 596.816414] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating folder: Instances. Parent ref: group-v700824. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 596.816414] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9514a8c-38a5-4d8b-ae19-89387f83d934 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.829102] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created folder: Instances in parent group-v700824. 
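For context on the Folder.CreateFolder and CreateVM_Task invocations surrounding this point in the log: the vmwareapi driver routes every vSphere SOAP call through an oslo.vmware session object, which is what produces the "Invoking ..." request_handler entries and the wait_for_task/_poll_task entries that follow. The sketch below shows that generic call pattern in standalone form; the endpoint, credentials and folder name are placeholders, and this is not the driver's actual code path:

```python
# Hedged sketch of the oslo.vmware call pattern visible in these entries.
from oslo_vmware import api

# Placeholder endpoint and credentials, not values from this deployment.
session = api.VMwareAPISession(
    'vcenter.example.com', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Plain synchronous SOAP call, as in the "Invoking Folder.CreateFolder"
# entries above: invoke_api() dispatches the method against the vim service.
root = session.vim.service_content.rootFolder
folder = session.invoke_api(session.vim, 'CreateFolder', root, name='OpenStack')

# Task-returning calls such as CreateVM_Task are polled to completion with
# wait_for_task(), which corresponds to the wait_for_task/_poll_task entries
# that follow in the log. Shown commented out since the config spec and
# resource pool are driver-built objects not reproduced here.
# task = session.invoke_api(session.vim, 'CreateVM_Task', folder,
#                           config=vm_config_spec, pool=resource_pool)
# session.wait_for_task(task)
```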
[ 596.829102] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 596.829102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 596.829102] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac9620b1-a6e7-4b0b-ba77-30fa3e8fc841 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.856653] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 596.856653] env[69475]: value = "task-3507457" [ 596.856653] env[69475]: _type = "Task" [ 596.856653] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.867413] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507457, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.892810] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 596.951094] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 596.951284] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 596.951427] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 597.036192] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c734ff54-1e82-4607-9723-e995f36b12cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.048068] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3904294d-fd18-4661-9697-84ec24d342bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.101285] env[69475]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-556cede0-2813-4505-b6f4-ac277bfa3c3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.113193] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32bb103-4185-40e8-ba62-de65a775a483 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.127702] env[69475]: DEBUG nova.compute.provider_tree [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.378975] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507457, 'name': CreateVM_Task, 'duration_secs': 0.34914} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.378975] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 597.417417] env[69475]: DEBUG oslo_vmware.service [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227bea8b-077a-4ed6-aa0d-6bf444e84d0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.432026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.432026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.432972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 597.433278] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0960e991-cbd5-4c11-8fc8-f082f36c8682 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.438711] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 597.438711] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528c10ab-7173-613c-c7a4-e755619eda6a" [ 597.438711] env[69475]: _type = "Task" [ 597.438711] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.447895] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528c10ab-7173-613c-c7a4-e755619eda6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.604063] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 597.632103] env[69475]: DEBUG nova.scheduler.client.report [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 597.905170] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 597.930197] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 597.930197] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 597.930197] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 597.930381] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 597.930381] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 597.931621] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 597.931903] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 597.932096] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 597.933739] env[69475]: DEBUG nova.virt.hardware [None 
req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 597.933739] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 597.933739] env[69475]: DEBUG nova.virt.hardware [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 597.934484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5d960e-3e9e-4e97-8e33-127b573eff61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.952476] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951f836c-b308-4917-b905-0c14ac23929a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.980367] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 597.980657] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 597.981227] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 597.982486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 597.982735] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 597.982985] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c63e428-80c4-43d8-9492-b0c0b087f4b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.993970] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 597.993970] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 597.993970] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a60c8e51-f533-47a7-9b32-002c6c4d133a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.001858] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e92dd8a-005e-43cf-9015-8a259ecd38c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.011173] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 598.011173] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d7db29-0c30-2fd6-f36b-36dc15589d12" [ 598.011173] env[69475]: _type = "Task" [ 598.011173] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.022080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 598.025020] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating directory with path [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 598.025020] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0e0a72e-370d-4617-81d4-8512ca6fd25d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.046127] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created directory with path [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 598.046127] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Fetch image to [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 598.046261] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Downloading image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk on the data store datastore1 {{(pid=69475) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 598.047022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaf002c-d613-4a34-876f-d4742f432858 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.057961] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34fc4b68-02ac-40b5-b590-46ce586afab5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.068881] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6384c517-ad0d-4234-8c48-8deb4b4572c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.108358] env[69475]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88ef55a-8d21-4273-8c4d-7a6968708137 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.117587] env[69475]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-fc213452-66df-4cb1-8cbf-eb947ecf8e43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.140332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.140452] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.143398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.054s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.145284] env[69475]: INFO nova.compute.claims [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.153357] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Downloading image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to the data store datastore1 {{(pid=69475) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 598.266021] env[69475]: DEBUG oslo_vmware.rw_handles [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 598.453230] env[69475]: DEBUG nova.network.neutron [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Updating instance_info_cache with network_info: [{"id": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "address": "fa:16:3e:bd:a8:90", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd190b86-ee", "ovs_interfaceid": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.650867] env[69475]: DEBUG nova.compute.utils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 598.666705] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 598.666902] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 598.958033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 598.958033] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Instance network_info: |[{"id": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "address": "fa:16:3e:bd:a8:90", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd190b86-ee", "ovs_interfaceid": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 598.958266] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:a8:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd190b86-eed6-4857-9dcb-7fc4a209989d', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 598.970041] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Creating folder: Project (a4238057618546babe78b7b37966652e). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.970502] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-888aedc1-9627-4c4b-87c2-7cf98875c433 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.981805] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Created folder: Project (a4238057618546babe78b7b37966652e) in parent group-v700823. [ 598.982084] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Creating folder: Instances. Parent ref: group-v700827. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 598.982916] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Successfully created port: 9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 598.986526] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c311d76d-5f7b-4c67-88c4-2f21acac13b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.000010] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Created folder: Instances in parent group-v700827. [ 599.000010] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 599.000010] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 599.000010] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-35e12b3a-3996-4b8b-8a86-44c88276db0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.028013] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 599.028013] env[69475]: value = "task-3507460" [ 599.028013] env[69475]: _type = "Task" [ 599.028013] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.038591] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507460, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.044355] env[69475]: DEBUG nova.policy [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70142cccbe764ad792c910a7e7b29584', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f9ff44ddd0f4b2393e659ba2bd2cfa6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.072307] env[69475]: DEBUG oslo_vmware.rw_handles [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 599.072307] env[69475]: DEBUG oslo_vmware.rw_handles [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 599.143490] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Downloaded image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk on the data store datastore1 {{(pid=69475) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 599.143490] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 599.143775] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copying Virtual Disk [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk to [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 599.144193] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1fced81-bd9d-45d0-8ad5-bb9d9fe23d83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
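The CopyVirtualDisk_Task invocation above is the image-caching step: the sparse tmp-sparse.vmdk that was just written into vmware_temp is copied alongside it as afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk, and the copy runs as a vCenter task that is polled until completion (the Task/progress lines that follow). A rough sketch of the same pattern, assuming a connected oslo_vmware.api.VMwareAPISession; cache_image_vmdk, dc_ref, src_path and dst_path are illustrative names, not Nova's actual vm_util signature:

    # Sketch: copy a VMDK within a datastore via the VirtualDiskManager and
    # block until vCenter reports the task finished.
    def cache_image_vmdk(session, dc_ref, src_path, dst_path):
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=src_path,
                                  sourceDatacenter=dc_ref,
                                  destName=dst_path,
                                  destDatacenter=dc_ref)
        # wait_for_task polls the task object, which is what produces the
        # "progress is 0%" / "completed successfully" entries in this log.
        session.wait_for_task(task)

Here src_path and dst_path would be '[datastore1] ...' style datastore paths such as the tmp-sparse.vmdk source and afa9d32c-....vmdk destination shown above; the later DeleteDatastoreFile_Task and MoveDatastoreFile_Task entries then drop the temporary sparse source and move the cached copy under devstack-image-cache_base.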
[ 599.153754] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 599.153754] env[69475]: value = "task-3507461" [ 599.153754] env[69475]: _type = "Task" [ 599.153754] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.162598] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507461, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.171928] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.377042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd45ad4-4f9d-4bb7-956d-35077cefa2a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.385531] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0792a20e-0d4e-46ee-8c5f-5ee91ff5528b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.418992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510ccbf7-9b02-4e3d-9622-cc446f390bf7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.427342] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d48201b6-0c36-42cb-97aa-4e71ba54e92a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.444109] env[69475]: DEBUG nova.compute.provider_tree [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 599.539020] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507460, 'name': CreateVM_Task, 'duration_secs': 0.343599} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.539020] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 599.539020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.539459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.539459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 599.539663] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dec0bc9-1b94-42ab-bc0e-b9461e0c5084 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.545872] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 599.545872] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529dd9ff-ee24-5b9b-1002-093d7c88dec4" [ 599.545872] env[69475]: _type = "Task" [ 599.545872] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.555227] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529dd9ff-ee24-5b9b-1002-093d7c88dec4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.562237] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Received event network-changed-595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 599.562374] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Refreshing instance network info cache due to event network-changed-595d3b80-121a-4ab1-9ece-34303f1a5b18. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 599.562601] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Acquiring lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 599.562745] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Acquired lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 599.562914] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Refreshing network info cache for port 595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 599.665315] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507461, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.746020] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Successfully updated port: 77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 599.948452] env[69475]: DEBUG nova.scheduler.client.report [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.059723] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 600.059723] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 600.059723] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.172954] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507461, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658387} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.172954] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copied Virtual Disk [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk to [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.172954] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleting the datastore file [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 600.172954] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2b28be90-eb2e-49de-97a6-34458b9a665b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.179652] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 600.179652] env[69475]: value = "task-3507462" [ 600.179652] env[69475]: _type = "Task" [ 600.179652] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.189441] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.191767] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.246304] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.246501] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 600.246631] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 600.246795] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 600.246942] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 600.247847] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 600.248328] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 600.248328] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 600.248479] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 600.248800] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 600.248800] env[69475]: DEBUG nova.virt.hardware [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 600.249380] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.249525] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquired lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.249682] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.254349] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ea15fa-79d4-4577-bb12-84f4c8efb562 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.265196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f6ec429-d29a-4423-b53e-cd7145d4f38d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.457798] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.314s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.458359] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 600.461707] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.222s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.463943] env[69475]: INFO nova.compute.claims [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 600.696243] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025276} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.696738] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 600.697480] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Moving file from [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b/afa9d32c-9f39-44fb-bf3b-50d35842a59f to [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f. {{(pid=69475) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 600.697741] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-359affef-df2e-48c8-bdfe-e2be361c7ba3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.706803] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 600.706803] env[69475]: value = "task-3507463" [ 600.706803] env[69475]: _type = "Task" [ 600.706803] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.716703] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507463, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.861009] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.861294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.880991] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 600.970625] env[69475]: DEBUG nova.compute.utils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 600.977019] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 600.977019] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 601.086222] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Successfully created port: e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 601.136765] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Updated VIF entry in instance network info cache for port 595d3b80-121a-4ab1-9ece-34303f1a5b18. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 601.136765] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Updating instance_info_cache with network_info: [{"id": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "address": "fa:16:3e:29:a5:ff", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap595d3b80-12", "ovs_interfaceid": "595d3b80-121a-4ab1-9ece-34303f1a5b18", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.218942] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507463, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.024247} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.219222] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] File moved {{(pid=69475) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 601.219342] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Cleaning up location [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 601.219682] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleting the datastore file [datastore1] vmware_temp/67427f74-91a8-4276-bf32-da18a26ac54b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 601.219780] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15950110-599b-4489-97a5-e9bd15b7e70d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.229801] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 601.229801] env[69475]: value = "task-3507464" [ 601.229801] env[69475]: _type = "Task" [ 601.229801] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.242846] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507464, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.266238] env[69475]: DEBUG nova.policy [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ca9d444ae2a540bc92e282e146d1edb6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee2a30208a8e4a17a993e75cf6d76d11', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 601.364260] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 601.478020] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 601.605870] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7f2a95-9c7d-420c-b1e2-f34752d0312c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.616206] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86bfe92-3e86-4e1b-b9c1-cf90957963ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.651461] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Releasing lock "refresh_cache-91d5b0db-63a5-4290-af9b-264a5ce4cd95" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.651823] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Received event network-vif-plugged-fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 601.652069] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Acquiring lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.652314] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.652511] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.652709] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] No waiting events found dispatching network-vif-plugged-fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 601.652912] env[69475]: WARNING nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Received unexpected event 
network-vif-plugged-fd190b86-eed6-4857-9dcb-7fc4a209989d for instance with vm_state building and task_state spawning. [ 601.653116] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Received event network-changed-fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 601.653301] env[69475]: DEBUG nova.compute.manager [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Refreshing instance network info cache due to event network-changed-fd190b86-eed6-4857-9dcb-7fc4a209989d. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 601.653517] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Acquiring lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.653687] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Acquired lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.653873] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Refreshing network info cache for port fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 601.657716] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4779ec-d78e-4131-b1d7-ff2d79b4fe8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.666195] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b428c586-f8c9-4ca0-bbdd-0d57bce680c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.681127] env[69475]: DEBUG nova.compute.provider_tree [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.746199] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507464, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.02579} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.746199] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 601.746781] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fc5332f-81af-41b5-bd1e-62b7bdd57beb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.752998] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 601.752998] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c31810-9661-3cf1-bbcd-5fbb0e7b92b9" [ 601.752998] env[69475]: _type = "Task" [ 601.752998] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.767893] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c31810-9661-3cf1-bbcd-5fbb0e7b92b9, 'name': SearchDatastore_Task, 'duration_secs': 0.008261} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.768171] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.768421] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 91d5b0db-63a5-4290-af9b-264a5ce4cd95/91d5b0db-63a5-4290-af9b-264a5ce4cd95.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 601.768688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 601.768873] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 601.769095] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0d297d1-25a3-4225-8c7e-43859efdb4ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.771333] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd380b57-2f20-47eb-b636-726511613048 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.778595] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 601.778595] env[69475]: value = "task-3507465" [ 601.778595] env[69475]: _type = "Task" [ 601.778595] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.779725] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 601.779895] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 601.784201] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2dd1e07-2786-4e4d-8971-059f7bb1e5c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.792425] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507465, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.793761] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 601.793761] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5269fb08-cd4d-093d-a4ee-93a55dcc0109" [ 601.793761] env[69475]: _type = "Task" [ 601.793761] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.802383] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5269fb08-cd4d-093d-a4ee-93a55dcc0109, 'name': SearchDatastore_Task, 'duration_secs': 0.007592} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.804339] env[69475]: DEBUG nova.network.neutron [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Updating instance_info_cache with network_info: [{"id": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "address": "fa:16:3e:ca:e0:05", "network": {"id": "1b38c0b0-ee6a-44ad-af8f-0b162150eeca", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-245985021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5e8e7c2b47143ada53b9ba98d18f697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77ecc630-73", "ovs_interfaceid": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.805178] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a71b02-81f9-4845-a21a-52fda6e081fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.813626] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 601.813626] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d26757-c32a-1ff0-cba3-90feb135541c" [ 601.813626] env[69475]: _type = "Task" [ 601.813626] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.821417] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d26757-c32a-1ff0-cba3-90feb135541c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.893847] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.184836] env[69475]: DEBUG nova.scheduler.client.report [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 602.296093] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507465, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484502} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.296740] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 91d5b0db-63a5-4290-af9b-264a5ce4cd95/91d5b0db-63a5-4290-af9b-264a5ce4cd95.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 602.297575] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 602.297575] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f8a7a96f-c8f1-4846-8122-7d2b0d598cc3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.307153] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 602.307153] env[69475]: value = "task-3507466" [ 602.307153] env[69475]: _type = "Task" [ 602.307153] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.309234] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Releasing lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.309605] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Instance network_info: |[{"id": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "address": "fa:16:3e:ca:e0:05", "network": {"id": "1b38c0b0-ee6a-44ad-af8f-0b162150eeca", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-245985021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5e8e7c2b47143ada53b9ba98d18f697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77ecc630-73", "ovs_interfaceid": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 602.310077] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ca:e0:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3739ba33-c119-432c-9aee-80a62864317d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77ecc630-733d-4acd-8e33-5354c11dd9a3', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 602.317779] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Creating folder: Project (c5e8e7c2b47143ada53b9ba98d18f697). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.318664] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5f8b9f2-d8f9-47c2-81ca-3854322c52e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.330759] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507466, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.339056] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d26757-c32a-1ff0-cba3-90feb135541c, 'name': SearchDatastore_Task, 'duration_secs': 0.009736} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.343396] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.343396] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dc2614b1-95b8-4887-8ca6-efe92921c926/dc2614b1-95b8-4887-8ca6-efe92921c926.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 602.344498] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb56f5f0-e0b1-46b4-940e-b381c32f23e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.347263] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.347470] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.348571] env[69475]: INFO nova.virt.vmwareapi.vm_util [None 
req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Created folder: Project (c5e8e7c2b47143ada53b9ba98d18f697) in parent group-v700823. [ 602.348749] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Creating folder: Instances. Parent ref: group-v700830. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 602.349225] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5601b5ac-786e-465a-b15d-574b955933fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.356909] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 602.356909] env[69475]: value = "task-3507469" [ 602.356909] env[69475]: _type = "Task" [ 602.356909] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.362710] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Created folder: Instances in parent group-v700830. [ 602.362710] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 602.362806] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 602.363080] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b4d8a448-0791-4045-bb3d-14bd8b564d63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.382049] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.388919] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.388919] env[69475]: value = "task-3507470" [ 602.388919] env[69475]: _type = "Task" [ 602.388919] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.395407] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507470, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.496602] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 602.525312] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 602.525312] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 602.525312] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 602.525603] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 602.525603] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 602.525603] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 602.525801] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 602.526635] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 602.526635] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 602.526635] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 602.526635] env[69475]: DEBUG nova.virt.hardware [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 602.527270] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1119737-2f2d-4a8b-83c2-bde68bcb4892 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.534985] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30103e8e-9249-4241-b986-d650c4cc4d78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.695474] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.234s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.696090] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 602.700819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 8.310s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.700819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.700819] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 602.701133] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.808s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.702811] env[69475]: INFO nova.compute.claims [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 602.706406] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f60f9ac0-570d-4c65-9781-b7cad5b86382 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.719449] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1168eb1-1468-43d4-809d-6f6d08dcb69a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.738733] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc55e5b6-a6f7-42f5-97f7-426f38365311 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.747027] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49ba7f1-fdde-4838-98c2-80253b2a0613 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.784595] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180887MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 602.784763] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.815909] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507466, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065794} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.815909] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 602.816644] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168909d0-cd26-45d1-aa3e-812448958950 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.842470] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] 91d5b0db-63a5-4290-af9b-264a5ce4cd95/91d5b0db-63a5-4290-af9b-264a5ce4cd95.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 602.844501] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-acddbbb3-d4bb-4610-8939-c94e5e94c606 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.862293] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 602.877854] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507469, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.878511] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 602.878511] env[69475]: value = "task-3507471" [ 602.878511] env[69475]: _type = "Task" [ 602.878511] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.895847] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507470, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.193652] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Successfully created port: 17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 603.208116] env[69475]: DEBUG nova.compute.utils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 603.210029] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 603.210271] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 603.244518] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Updated VIF entry in instance network info cache for port fd190b86-eed6-4857-9dcb-7fc4a209989d. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 603.244861] env[69475]: DEBUG nova.network.neutron [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Updating instance_info_cache with network_info: [{"id": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "address": "fa:16:3e:bd:a8:90", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd190b86-ee", "ovs_interfaceid": "fd190b86-eed6-4857-9dcb-7fc4a209989d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.278828] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Successfully updated port: 9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 603.392572] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.843935} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.396418] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dc2614b1-95b8-4887-8ca6-efe92921c926/dc2614b1-95b8-4887-8ca6-efe92921c926.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 603.396811] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 603.400611] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c79ac6a-c8ed-41b4-9975-977c7890e725 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.403544] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507471, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.408670] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507470, 'name': CreateVM_Task, 'duration_secs': 0.783101} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.410073] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 603.410194] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 603.410194] env[69475]: value = "task-3507472" [ 603.410194] env[69475]: _type = "Task" [ 603.410194] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.411072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.411072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.411594] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 603.411989] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4697194-2b06-48ad-9c61-fcb5271969c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.417854] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.421322] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 603.421322] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527b6ba2-a806-2e70-b408-8826a6b73b4e" [ 603.421322] env[69475]: _type = "Task" [ 603.421322] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.424242] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507472, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.433854] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527b6ba2-a806-2e70-b408-8826a6b73b4e, 'name': SearchDatastore_Task, 'duration_secs': 0.009218} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.433997] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.434286] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 603.434524] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.434655] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.434835] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 603.435122] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2edf51ab-00c6-449f-9fa7-f5588de401f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.442569] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 603.442749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 603.443477] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f86c9faf-0dac-44d3-b09c-d7cfc892e2c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.448902] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 603.448902] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5262c1a8-d4cd-a3c6-1b06-9fcff3742592" [ 603.448902] env[69475]: _type = "Task" [ 603.448902] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.457802] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5262c1a8-d4cd-a3c6-1b06-9fcff3742592, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.500812] env[69475]: DEBUG nova.policy [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98863bb99e474ed5bbbbb029574d1769', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd1d7ffc7ecd34b918b1998fdc97c3425', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 603.715138] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 603.750339] env[69475]: DEBUG oslo_concurrency.lockutils [req-cabbcd9e-1ae8-4e4a-a740-a37bc91b407d req-effb2e91-a760-42fa-8fef-ccf5457885bc service nova] Releasing lock "refresh_cache-dc2614b1-95b8-4887-8ca6-efe92921c926" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.791309] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.791309] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquired lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.791309] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 603.890404] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507471, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.928207] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507472, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065586} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.928911] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 603.929930] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e3e808-331c-42d6-9562-8ee7195f2884 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.960555] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] dc2614b1-95b8-4887-8ca6-efe92921c926/dc2614b1-95b8-4887-8ca6-efe92921c926.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 603.963979] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a52ce8b8-4ba3-4258-8907-6142cec8376e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.997510] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5262c1a8-d4cd-a3c6-1b06-9fcff3742592, 'name': SearchDatastore_Task, 'duration_secs': 0.008259} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.003451] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 604.003451] env[69475]: value = "task-3507473" [ 604.003451] env[69475]: _type = "Task" [ 604.003451] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.004847] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e978df96-afa9-406f-b925-48742e0bd6ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.015631] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 604.015631] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52433d22-9ea1-a555-eaf6-2869fc213e88" [ 604.015631] env[69475]: _type = "Task" [ 604.015631] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.020482] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507473, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.034998] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52433d22-9ea1-a555-eaf6-2869fc213e88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.094330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3900faa9-3fc6-4ab0-8255-96770f40093e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.108854] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5956e757-0245-42ff-ad7f-29b0cea62cc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.148019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89082d3-1a54-49cd-9956-ba9f2453d7fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.154747] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233664bc-2e7d-4fe2-9714-912fbfd9c55b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.172658] env[69475]: DEBUG nova.compute.provider_tree [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.391433] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507471, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.517963] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507473, 'name': ReconfigVM_Task, 'duration_secs': 0.420572} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.517963] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Reconfigured VM instance instance-00000002 to attach disk [datastore1] dc2614b1-95b8-4887-8ca6-efe92921c926/dc2614b1-95b8-4887-8ca6-efe92921c926.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.519343] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24dcab5e-8917-4a04-9a7a-c8585506e41f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.525148] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 604.525148] env[69475]: value = "task-3507474" [ 604.525148] env[69475]: _type = "Task" [ 604.525148] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.533880] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52433d22-9ea1-a555-eaf6-2869fc213e88, 'name': SearchDatastore_Task, 'duration_secs': 0.029833} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.533880] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.533880] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ec7a6b3c-2a2f-4edd-8b79-ba55551d6159/ec7a6b3c-2a2f-4edd-8b79-ba55551d6159.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.533880] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-07ea1c81-642a-44b6-abe1-64323c06f917 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.538568] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507474, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.544771] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 604.544771] env[69475]: value = "task-3507475" [ 604.544771] env[69475]: _type = "Task" [ 604.544771] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.553274] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507475, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.596908] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 604.642132] env[69475]: DEBUG nova.compute.manager [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Received event network-vif-plugged-77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.642706] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Acquiring lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 604.645873] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.645873] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.645873] env[69475]: DEBUG nova.compute.manager [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] No waiting events found dispatching network-vif-plugged-77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 604.645873] env[69475]: WARNING nova.compute.manager [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Received unexpected event network-vif-plugged-77ecc630-733d-4acd-8e33-5354c11dd9a3 for instance with vm_state 
building and task_state spawning. [ 604.645873] env[69475]: DEBUG nova.compute.manager [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Received event network-changed-77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 604.646380] env[69475]: DEBUG nova.compute.manager [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Refreshing instance network info cache due to event network-changed-77ecc630-733d-4acd-8e33-5354c11dd9a3. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 604.646380] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Acquiring lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.646380] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Acquired lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.646380] env[69475]: DEBUG nova.network.neutron [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Refreshing network info cache for port 77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 604.680041] env[69475]: DEBUG nova.scheduler.client.report [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 604.732322] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 604.774838] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 604.775218] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 604.775280] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 604.775461] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 604.775605] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 604.775752] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 604.775960] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 604.776135] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 604.776305] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 604.776466] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 604.776635] env[69475]: DEBUG nova.virt.hardware [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 604.777598] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9afdf291-77cf-4ba2-b362-5ed0a37dda73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.785906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2a518a-f957-4c38-869b-4de02d0352d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.894176] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507471, 'name': ReconfigVM_Task, 'duration_secs': 1.550423} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.894458] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Reconfigured VM instance instance-00000001 to attach disk [datastore1] 91d5b0db-63a5-4290-af9b-264a5ce4cd95/91d5b0db-63a5-4290-af9b-264a5ce4cd95.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 604.895131] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9e751ea4-79ba-49db-8037-249fff014d6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.902684] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 604.902684] env[69475]: value = "task-3507476" [ 604.902684] env[69475]: _type = "Task" [ 604.902684] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.912040] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507476, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.036980] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507474, 'name': Rename_Task, 'duration_secs': 0.188923} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.036980] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 605.037189] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-582c0a88-8267-4b10-bf30-d7b06fe5cd88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.044055] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 605.044055] env[69475]: value = "task-3507477" [ 605.044055] env[69475]: _type = "Task" [ 605.044055] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.056338] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507477, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.060490] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507475, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470639} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.061549] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ec7a6b3c-2a2f-4edd-8b79-ba55551d6159/ec7a6b3c-2a2f-4edd-8b79-ba55551d6159.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.061549] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.061549] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b64f705a-77d8-484b-aba9-f63d921abc26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.069758] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 605.069758] env[69475]: value = "task-3507478" [ 605.069758] env[69475]: _type = "Task" [ 605.069758] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.085236] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507478, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.190635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.489s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 605.190635] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 605.196458] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.410s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 605.207804] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Successfully updated port: e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 605.288329] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Successfully created port: 56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.360527] env[69475]: DEBUG nova.network.neutron [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Updating instance_info_cache with network_info: [{"id": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "address": "fa:16:3e:07:5a:79", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdc8cda-34", "ovs_interfaceid": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.416302] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507476, 'name': Rename_Task, 'duration_secs': 0.218645} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.416500] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 605.416762] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed8e5cee-db29-4c86-a140-d0484808fe89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.427086] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 605.427086] env[69475]: value = "task-3507479" [ 605.427086] env[69475]: _type = "Task" [ 605.427086] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.437167] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507479, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.558218] env[69475]: DEBUG oslo_vmware.api [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507477, 'name': PowerOnVM_Task, 'duration_secs': 0.473561} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.558218] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 605.558653] env[69475]: INFO nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Took 12.17 seconds to spawn the instance on the hypervisor. 
[ 605.559131] env[69475]: DEBUG nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 605.560273] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f91120-16fc-4b3c-be1b-dd118f5ca3a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.586137] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507478, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074101} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.586420] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 605.587268] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbfe868-c717-43ec-a82e-b4672050f92d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.615947] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] ec7a6b3c-2a2f-4edd-8b79-ba55551d6159/ec7a6b3c-2a2f-4edd-8b79-ba55551d6159.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.618838] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-761154cb-ca4d-44f7-aab3-e0738fa78a20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.642535] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 605.642535] env[69475]: value = "task-3507480" [ 605.642535] env[69475]: _type = "Task" [ 605.642535] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.652818] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507480, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.698180] env[69475]: DEBUG nova.compute.utils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 605.701203] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 605.701203] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 605.710692] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.710692] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.710692] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 605.836334] env[69475]: DEBUG nova.policy [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2931a0debdd64b0795f1de5dc3a99002', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25e8170617f6470dbcf8c36752c83214', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 605.866511] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Releasing lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.866511] env[69475]: DEBUG nova.compute.manager [None 
req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Instance network_info: |[{"id": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "address": "fa:16:3e:07:5a:79", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdc8cda-34", "ovs_interfaceid": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 605.866755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:5a:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9cdc8cda-340c-4ebc-884a-d52746c1cda6', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 605.874776] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Creating folder: Project (acd5b9b57fca4334826b9846abe4354a). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.875515] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed99c391-df04-416f-8f55-5f25b41835e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.887254] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Created folder: Project (acd5b9b57fca4334826b9846abe4354a) in parent group-v700823. [ 605.887588] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Creating folder: Instances. Parent ref: group-v700833. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 605.887865] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d22ec00-39a2-4e77-91f5-4b4db83b76cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.898205] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Created folder: Instances in parent group-v700833. [ 605.898690] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 605.898967] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 605.900351] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8628604c-c2fe-44cb-98fa-34c2d8ca8202 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.925864] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 605.925864] env[69475]: value = "task-3507483" [ 605.925864] env[69475]: _type = "Task" [ 605.925864] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.937825] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507483, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.941542] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507479, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.090806] env[69475]: INFO nova.compute.manager [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Took 18.87 seconds to build instance. [ 606.155014] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507480, 'name': ReconfigVM_Task, 'duration_secs': 0.509151} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.155448] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Reconfigured VM instance instance-00000003 to attach disk [datastore1] ec7a6b3c-2a2f-4edd-8b79-ba55551d6159/ec7a6b3c-2a2f-4edd-8b79-ba55551d6159.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 606.156117] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8edc818-8186-42ff-99fb-1519f44e405f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.162514] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 606.162514] env[69475]: value = "task-3507484" [ 606.162514] env[69475]: _type = "Task" [ 606.162514] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.172902] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507484, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.208601] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 606.233702] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 91d5b0db-63a5-4290-af9b-264a5ce4cd95 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.233866] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance ec7a6b3c-2a2f-4edd-8b79-ba55551d6159 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.233996] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance dc2614b1-95b8-4887-8ca6-efe92921c926 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.234145] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e1ecc905-22da-434a-8ddf-a66f88ab47fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.234262] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4465f156-09cc-4eba-90e4-be76f3010363 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.234375] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 48bc79bc-df56-4523-808f-a71b391062b9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.234489] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 7be48799-ea4a-4e7f-95c2-637460596cfc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.234601] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 606.333538] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 606.335980] env[69475]: DEBUG nova.network.neutron [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Updated VIF entry in instance network info cache for port 77ecc630-733d-4acd-8e33-5354c11dd9a3. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 606.336486] env[69475]: DEBUG nova.network.neutron [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Updating instance_info_cache with network_info: [{"id": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "address": "fa:16:3e:ca:e0:05", "network": {"id": "1b38c0b0-ee6a-44ad-af8f-0b162150eeca", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-245985021-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c5e8e7c2b47143ada53b9ba98d18f697", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3739ba33-c119-432c-9aee-80a62864317d", "external-id": "nsx-vlan-transportzone-474", "segmentation_id": 474, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77ecc630-73", "ovs_interfaceid": "77ecc630-733d-4acd-8e33-5354c11dd9a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.446348] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507483, 'name': CreateVM_Task, 'duration_secs': 0.348416} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.446597] env[69475]: DEBUG oslo_vmware.api [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507479, 'name': PowerOnVM_Task, 'duration_secs': 0.521944} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.446748] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 606.446971] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 606.447929] env[69475]: INFO nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Took 15.32 seconds to spawn the instance on the hypervisor. 
[ 606.447929] env[69475]: DEBUG nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 606.448367] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.448520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.448838] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 606.449934] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd51395-4e0d-4d05-be3a-42f4a26d3a60 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.453105] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17dda89c-55c1-4ad4-8c46-047f8d6db0cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.462445] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 606.462445] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f48e3-68bf-b04b-a944-fbdb2aa3c69d" [ 606.462445] env[69475]: _type = "Task" [ 606.462445] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.471083] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f48e3-68bf-b04b-a944-fbdb2aa3c69d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.591098] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6f2a7fb9-07bc-4a30-9cc9-62139b9fd68b tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.384s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.591394] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 15.594s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.592447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98c9fdc-2f7e-46e9-a1b8-cb8af76d9478 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.683386] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507484, 'name': Rename_Task, 'duration_secs': 0.313911} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.686018] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 606.686018] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8aa34d20-739f-4b63-90ab-1e411e726cbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.694018] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 606.694018] env[69475]: value = "task-3507485" [ 606.694018] env[69475]: _type = "Task" [ 606.694018] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.705262] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507485, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.738771] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 25c44ae0-4193-4833-85ec-ebc0ef3cf593 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 606.738771] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 606.738771] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2048MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 606.793919] env[69475]: DEBUG nova.network.neutron [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Updating instance_info_cache with network_info: [{"id": "e1445b37-7f07-4058-88cb-07a6189aa684", "address": "fa:16:3e:55:90:7c", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1445b37-7f", "ovs_interfaceid": "e1445b37-7f07-4058-88cb-07a6189aa684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.839160] env[69475]: DEBUG oslo_concurrency.lockutils [req-225459be-6bc7-4b45-a23a-dd72bc9490be req-68e19b05-065d-4963-8bf1-39a869fd0bf1 service nova] Releasing lock "refresh_cache-ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.892035] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e509105-a194-4cc0-aa98-2b6e9e76ad2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.900034] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Successfully updated port: 17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 606.902488] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb36b73-b37a-4fcd-a34e-b2dc71858dc6 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.942693] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e307d2-a4d4-4d7a-a800-9c2675ed0c80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.951394] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e423865-d00e-4960-91d3-8f964e695919 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.969725] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.976016] env[69475]: INFO nova.compute.manager [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Took 20.20 seconds to build instance. [ 606.982434] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f48e3-68bf-b04b-a944-fbdb2aa3c69d, 'name': SearchDatastore_Task, 'duration_secs': 0.015509} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.983066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.983319] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 606.983571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.983714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.984267] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 
tempest-ServerDiagnosticsTest-878497450-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 606.984976] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2a949b0-e937-40f2-9678-ed8a166fc3a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.993668] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 606.993847] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 606.994939] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7485648-5d7b-46f1-b466-ff582424e4da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.003792] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 607.003792] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e4f447-f1ba-1e10-b3cc-768b06cc5875" [ 607.003792] env[69475]: _type = "Task" [ 607.003792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.012742] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e4f447-f1ba-1e10-b3cc-768b06cc5875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.021201] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Successfully created port: 806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 607.103384] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.512s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.204948] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507485, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.220492] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 607.249970] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.249970] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 607.249970] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 607.250189] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 607.250189] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 607.250435] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 607.250657] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 607.250944] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 607.251042] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 607.251250] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 607.252094] env[69475]: DEBUG nova.virt.hardware [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 607.252976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c2896e-617b-4639-b7bb-32feaf7fd734 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.262147] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9933f4c9-5f1d-4a60-acd9-bcb5e255a2c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.296733] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.297109] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Instance network_info: |[{"id": "e1445b37-7f07-4058-88cb-07a6189aa684", "address": "fa:16:3e:55:90:7c", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": 
"nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1445b37-7f", "ovs_interfaceid": "e1445b37-7f07-4058-88cb-07a6189aa684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 607.297576] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:90:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1445b37-7f07-4058-88cb-07a6189aa684', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.306450] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Creating folder: Project (1f9ff44ddd0f4b2393e659ba2bd2cfa6). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.306574] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5fbf2cd1-5ad7-4f6e-bfdd-ea5045aa83d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.317311] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Created folder: Project (1f9ff44ddd0f4b2393e659ba2bd2cfa6) in parent group-v700823. [ 607.317497] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Creating folder: Instances. Parent ref: group-v700836. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.317812] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f76919bf-6dcf-4d1c-8826-25854608884a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.329946] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Created folder: Instances in parent group-v700836. [ 607.330141] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 607.330341] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.330540] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-efa4617f-dc69-4c85-bcd5-d92e23eba5ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.361824] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.361824] env[69475]: value = "task-3507488" [ 607.361824] env[69475]: _type = "Task" [ 607.361824] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.372022] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507488, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.411945] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.412118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquired lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 607.412267] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 607.420852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 607.421446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.477305] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 607.480812] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdaa6f63-cade-472b-94ad-75b604621769 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.719s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.480917] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 16.484s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.481741] env[69475]: INFO nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] During sync_power_state the instance has a pending task (block_device_mapping). Skip. [ 607.481741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.515390] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e4f447-f1ba-1e10-b3cc-768b06cc5875, 'name': SearchDatastore_Task, 'duration_secs': 0.010666} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.516073] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c44b5eb3-df66-4747-96a6-4c367497aa03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.523410] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 607.523410] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234e3a8-9606-a576-6e38-ff15d99031e5" [ 607.523410] env[69475]: _type = "Task" [ 607.523410] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.531933] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234e3a8-9606-a576-6e38-ff15d99031e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.705584] env[69475]: DEBUG oslo_vmware.api [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507485, 'name': PowerOnVM_Task, 'duration_secs': 0.689233} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.705638] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 607.705875] env[69475]: INFO nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Took 12.06 seconds to spawn the instance on the hypervisor. [ 607.706598] env[69475]: DEBUG nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 607.707434] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5750c69-a8c3-40dc-9c99-640e852030aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.873964] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507488, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.917345] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Successfully updated port: 56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.924612] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 607.984439] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 607.984439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.788s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 607.984439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.565s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 607.985642] env[69475]: INFO nova.compute.claims [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.990060] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.036573] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234e3a8-9606-a576-6e38-ff15d99031e5, 'name': SearchDatastore_Task, 'duration_secs': 0.01593} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.037008] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.041022] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e1ecc905-22da-434a-8ddf-a66f88ab47fb/e1ecc905-22da-434a-8ddf-a66f88ab47fb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 608.041022] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c12c1bd-eaf8-480c-ad1e-4c8240e4d1cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.046896] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 608.046896] env[69475]: value = "task-3507489" [ 608.046896] env[69475]: _type = "Task" [ 608.046896] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.056213] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507489, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.210226] env[69475]: DEBUG nova.network.neutron [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Updating instance_info_cache with network_info: [{"id": "17befd20-3223-44d2-87ad-fed58da5076e", "address": "fa:16:3e:64:54:e8", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17befd20-32", "ovs_interfaceid": "17befd20-3223-44d2-87ad-fed58da5076e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.233612] env[69475]: INFO nova.compute.manager [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Took 20.50 seconds to build instance. [ 608.376818] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507488, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.422105] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.422105] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquired lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.422105] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.459689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.558716] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507489, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.713617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Releasing lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 608.713617] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Instance network_info: |[{"id": "17befd20-3223-44d2-87ad-fed58da5076e", "address": "fa:16:3e:64:54:e8", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17befd20-32", "ovs_interfaceid": "17befd20-3223-44d2-87ad-fed58da5076e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 608.713817] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:54:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '17befd20-3223-44d2-87ad-fed58da5076e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 608.721694] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Creating folder: Project (ee2a30208a8e4a17a993e75cf6d76d11). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.722097] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-84f1a87e-2985-48f1-93b4-693e6e5932a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.733986] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Created folder: Project (ee2a30208a8e4a17a993e75cf6d76d11) in parent group-v700823. [ 608.733986] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Creating folder: Instances. Parent ref: group-v700839. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 608.733986] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-487d1043-fec1-4328-b7ee-bc96a59a2fcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.737639] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d2dd7545-6745-4477-8bb2-485212fbddc8 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.019s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.742019] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Created folder: Instances in parent group-v700839. [ 608.742305] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 608.742438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 608.742633] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c78b1394-6d13-4f94-bf8b-745c74c60a20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.777016] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 608.777016] env[69475]: value = "task-3507492" [ 608.777016] env[69475]: _type = "Task" [ 608.777016] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.784673] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507492, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.871936] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507488, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.898266] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "67287947-ecce-4462-8268-23bcc7421766" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.898266] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.064789] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507489, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521992} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.068743] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e1ecc905-22da-434a-8ddf-a66f88ab47fb/e1ecc905-22da-434a-8ddf-a66f88ab47fb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 609.070083] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 609.070083] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-30e030ed-4f83-4b9d-a3b7-4886ccde17d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.080204] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 609.080204] env[69475]: value = "task-3507493" [ 609.080204] env[69475]: _type = "Task" [ 609.080204] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.095968] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.148565] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 609.240896] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Successfully updated port: 806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 609.267524] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8437371-343c-4714-a335-cc0b42d023cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.283036] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17fbd990-5925-4c0b-87ff-dbc7434b5577 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.289599] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507492, 'name': CreateVM_Task, 'duration_secs': 0.463985} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.290171] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.290976] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.291150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.291458] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.291698] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f81eaad0-8ffc-4f6c-aaa7-f6645c765c57 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.318976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-118cd97f-c5cc-4157-91a3-a9f930d4d54b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.324476] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 609.324476] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd0f3-1e54-800d-6387-536aec5ed4ef" [ 609.324476] env[69475]: _type = "Task" [ 609.324476] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.332664] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4289c67-48e5-4e3b-83f6-241b1bebf425 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.340893] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd0f3-1e54-800d-6387-536aec5ed4ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.355320] env[69475]: DEBUG nova.compute.provider_tree [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.372583] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507488, 'name': CreateVM_Task, 'duration_secs': 1.684957} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.373413] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.374191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.401051] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 609.479366] env[69475]: DEBUG nova.network.neutron [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updating instance_info_cache with network_info: [{"id": "56faa0c7-80a1-46f5-8167-4485e94846ea", "address": "fa:16:3e:60:39:c6", "network": {"id": "57d3fe81-db66-46d5-ba0f-eec2a4cfd7cb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1825674224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d7ffc7ecd34b918b1998fdc97c3425", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56faa0c7-80", "ovs_interfaceid": "56faa0c7-80a1-46f5-8167-4485e94846ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.592196] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507493, 
'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091025} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.592466] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 609.593835] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77627c2e-327c-474a-9b1e-5f5c6fbca334 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.621827] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] e1ecc905-22da-434a-8ddf-a66f88ab47fb/e1ecc905-22da-434a-8ddf-a66f88ab47fb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 609.623991] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f238350d-ccc6-4aba-a7c6-3ed29eff37f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.642891] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 609.642891] env[69475]: value = "task-3507494" [ 609.642891] env[69475]: _type = "Task" [ 609.642891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.652747] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507494, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.668441] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.669129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.744173] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.744355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquired lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.744424] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 609.837935] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd0f3-1e54-800d-6387-536aec5ed4ef, 'name': SearchDatastore_Task, 'duration_secs': 0.018421} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.838276] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.838505] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.838741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.838883] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.839072] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.839354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 609.839845] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 609.840113] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-293625d5-6bed-404e-aed3-6869593ec858 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.842649] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35c8c9d-b58c-4ada-9125-3307af67931d {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.849132] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 609.849132] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cee5f5-f14e-7c2e-152c-450c0e6df0d4" [ 609.849132] env[69475]: _type = "Task" [ 609.849132] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.856116] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.856342] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 609.858398] env[69475]: DEBUG nova.scheduler.client.report [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 609.862373] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-246faa01-bf32-49ff-b7c5-fdbc8586b2f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.871324] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cee5f5-f14e-7c2e-152c-450c0e6df0d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.875333] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 609.875333] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b28d9-bff4-7ad4-9b04-b7710854f99b" [ 609.875333] env[69475]: _type = "Task" [ 609.875333] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.888861] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b28d9-bff4-7ad4-9b04-b7710854f99b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.927957] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.984217] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Releasing lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.984571] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Instance network_info: |[{"id": "56faa0c7-80a1-46f5-8167-4485e94846ea", "address": "fa:16:3e:60:39:c6", "network": {"id": "57d3fe81-db66-46d5-ba0f-eec2a4cfd7cb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1825674224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d7ffc7ecd34b918b1998fdc97c3425", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56faa0c7-80", "ovs_interfaceid": "56faa0c7-80a1-46f5-8167-4485e94846ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 609.985019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:39:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'da623279-b6f6-4570-8b15-a332120b8b60', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56faa0c7-80a1-46f5-8167-4485e94846ea', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.995254] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Creating folder: Project (d1d7ffc7ecd34b918b1998fdc97c3425). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 609.995586] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-990ec06a-1c7d-4169-9ea3-782855fd685e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.007519] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Created folder: Project (d1d7ffc7ecd34b918b1998fdc97c3425) in parent group-v700823. [ 610.007519] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Creating folder: Instances. Parent ref: group-v700842. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 610.007666] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df1b18ac-8abf-433c-8558-ae91aca21bad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.018771] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Created folder: Instances in parent group-v700842. [ 610.021637] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 610.021637] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 610.021637] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87b0830b-2abc-4e31-9645-e01a909f9c4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.044691] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 610.044691] env[69475]: value = "task-3507497" [ 610.044691] env[69475]: _type = "Task" [ 610.044691] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.055786] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507497, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.159675] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507494, 'name': ReconfigVM_Task, 'duration_secs': 0.270301} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.159675] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Reconfigured VM instance instance-00000004 to attach disk [datastore1] e1ecc905-22da-434a-8ddf-a66f88ab47fb/e1ecc905-22da-434a-8ddf-a66f88ab47fb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 610.160787] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f96ba95-c814-4b30-8f8d-f9dfc038541c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.174400] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.182164] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 610.182164] env[69475]: value = "task-3507498" [ 610.182164] env[69475]: _type = "Task" [ 610.182164] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.203193] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507498, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.330133] env[69475]: DEBUG nova.compute.manager [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Received event network-vif-plugged-17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.331064] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] Acquiring lock "48bc79bc-df56-4523-808f-a71b391062b9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.331581] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] Lock "48bc79bc-df56-4523-808f-a71b391062b9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.331930] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] Lock "48bc79bc-df56-4523-808f-a71b391062b9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.332396] env[69475]: DEBUG nova.compute.manager [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] No waiting events found dispatching network-vif-plugged-17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 610.332715] env[69475]: WARNING nova.compute.manager [req-ec257086-5f72-4c60-a2c3-c317a94f963b req-5a932ca2-ce3f-40c2-a35a-181c4889a70d service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Received unexpected event network-vif-plugged-17befd20-3223-44d2-87ad-fed58da5076e for instance with vm_state building and task_state spawning. [ 610.336972] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 610.363667] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cee5f5-f14e-7c2e-152c-450c0e6df0d4, 'name': SearchDatastore_Task, 'duration_secs': 0.014837} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.364047] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.364634] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.365483] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.367446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.384s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.368087] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 610.377201] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Received event network-vif-plugged-9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.377467] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquiring lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.377684] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.377840] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.378051] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] No waiting events found dispatching network-vif-plugged-9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 610.378209] env[69475]: WARNING nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Received unexpected event network-vif-plugged-9cdc8cda-340c-4ebc-884a-d52746c1cda6 for instance with vm_state building and task_state spawning. [ 610.378373] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Received event network-changed-9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 610.378518] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Refreshing instance network info cache due to event network-changed-9cdc8cda-340c-4ebc-884a-d52746c1cda6. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 610.378699] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquiring lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.378824] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquired lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.378976] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Refreshing network info cache for port 9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 610.380445] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.921s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.384408] env[69475]: INFO nova.compute.claims [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.416442] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b28d9-bff4-7ad4-9b04-b7710854f99b, 'name': SearchDatastore_Task, 'duration_secs': 0.024967} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.417344] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5831bd72-d45b-4f53-ad54-4bc1b54dbffc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.423700] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 610.423700] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f6265-48ff-7a24-4318-16211e65e6a8" [ 610.423700] env[69475]: _type = "Task" [ 610.423700] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.436248] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f6265-48ff-7a24-4318-16211e65e6a8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.555887] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507497, 'name': CreateVM_Task, 'duration_secs': 0.359844} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.556384] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 610.557196] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.557533] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.557954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 610.558340] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3bcd6a9a-2bd4-423f-9989-67bc73b2a430 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.563342] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 610.563342] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52baff2b-3b89-d942-eccc-51d449e471fb" [ 610.563342] env[69475]: _type = "Task" [ 610.563342] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.571375] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52baff2b-3b89-d942-eccc-51d449e471fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.703748] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507498, 'name': Rename_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.717307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.797835] env[69475]: DEBUG nova.network.neutron [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating instance_info_cache with network_info: [{"id": "806e8096-632b-4993-a27c-3eb4767e9d00", "address": "fa:16:3e:db:bb:e6", "network": {"id": "1d7c82a8-9307-4c3e-938e-03ec482c5ac1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1922599493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25e8170617f6470dbcf8c36752c83214", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap806e8096-63", "ovs_interfaceid": "806e8096-632b-4993-a27c-3eb4767e9d00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 610.882648] env[69475]: DEBUG nova.compute.utils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 610.884032] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 610.887759] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 610.944671] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f6265-48ff-7a24-4318-16211e65e6a8, 'name': SearchDatastore_Task, 'duration_secs': 0.013604} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.944671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.944671] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 48bc79bc-df56-4523-808f-a71b391062b9/48bc79bc-df56-4523-808f-a71b391062b9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.944998] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.944998] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.945945] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2fe11d0-8566-47cf-8584-c6632bcc5e70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.948896] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9165170f-7c5e-442d-822a-7fb78ea8cc48 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.956592] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 610.956592] 
env[69475]: value = "task-3507499" [ 610.956592] env[69475]: _type = "Task" [ 610.956592] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.961898] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.961898] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.962554] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a3a7a17-579f-4df4-9807-d77f44f237a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.969700] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507499, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.972613] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 610.972613] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644b8-b919-b8df-8668-1eff151a1403" [ 610.972613] env[69475]: _type = "Task" [ 610.972613] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.982158] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644b8-b919-b8df-8668-1eff151a1403, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.075190] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52baff2b-3b89-d942-eccc-51d449e471fb, 'name': SearchDatastore_Task, 'duration_secs': 0.019691} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.075492] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.075726] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 611.075937] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.198583] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507498, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.207907] env[69475]: DEBUG nova.policy [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2570dc62118b4091b27c1e64f4752558', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947a74cfc69b45dbb3aa09060c5b76f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 611.302660] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Releasing lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.303774] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Instance network_info: |[{"id": "806e8096-632b-4993-a27c-3eb4767e9d00", "address": "fa:16:3e:db:bb:e6", "network": {"id": "1d7c82a8-9307-4c3e-938e-03ec482c5ac1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1922599493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25e8170617f6470dbcf8c36752c83214", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap806e8096-63", "ovs_interfaceid": "806e8096-632b-4993-a27c-3eb4767e9d00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 611.304572] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:bb:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27e0a333-0cad-496c-8e6e-37a2edc97ac4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '806e8096-632b-4993-a27c-3eb4767e9d00', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.322056] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Creating folder: Project (25e8170617f6470dbcf8c36752c83214). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.326125] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fd8b6708-5365-4767-9115-d520f26af797 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.337779] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Created folder: Project (25e8170617f6470dbcf8c36752c83214) in parent group-v700823. [ 611.338176] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Creating folder: Instances. Parent ref: group-v700845. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 611.338497] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-61dc67a3-87b4-466f-9a65-6849c62014c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.353460] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Created folder: Instances in parent group-v700845. 
[ 611.353848] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 611.354141] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 611.354251] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-78a5ebdb-2878-4bc4-8d5a-9ea7f11fe782 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.389870] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 611.392963] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.392963] env[69475]: value = "task-3507502" [ 611.392963] env[69475]: _type = "Task" [ 611.392963] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.408837] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507502, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.475812] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507499, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.497468] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644b8-b919-b8df-8668-1eff151a1403, 'name': SearchDatastore_Task, 'duration_secs': 0.022065} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.498823] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-915430e4-a7da-4181-98cd-bb94bebb8f3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.507968] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 611.507968] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298b767-bd1e-a23b-964a-a3861151a44b" [ 611.507968] env[69475]: _type = "Task" [ 611.507968] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.522362] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298b767-bd1e-a23b-964a-a3861151a44b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.680486] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fc39e89-706c-497d-8a5b-030ba2c8b6bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.693083] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630629ca-24b3-476e-a9e0-25de763065bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.735136] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507498, 'name': Rename_Task, 'duration_secs': 1.139771} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.735972] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80ae2626-5330-4ecd-b560-247c55f9c63d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.740085] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 611.740085] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7974fb44-f805-4601-96e8-c0c2e63ec0d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.748737] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51432a55-0ee3-4788-94c4-8ab4dd631777 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.752937] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 611.752937] env[69475]: value = "task-3507503" [ 611.752937] env[69475]: _type = "Task" [ 611.752937] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.767099] env[69475]: DEBUG nova.compute.provider_tree [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.774131] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507503, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.918390] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507502, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.975626] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Updated VIF entry in instance network info cache for port 9cdc8cda-340c-4ebc-884a-d52746c1cda6. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 611.976305] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Updating instance_info_cache with network_info: [{"id": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "address": "fa:16:3e:07:5a:79", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.66", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9cdc8cda-34", "ovs_interfaceid": "9cdc8cda-340c-4ebc-884a-d52746c1cda6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.977255] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507499, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760127} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.978518] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 48bc79bc-df56-4523-808f-a71b391062b9/48bc79bc-df56-4523-808f-a71b391062b9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.978739] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 611.979326] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9d815cc1-0165-471a-8eae-3738da47b8f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.987360] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 611.987360] env[69475]: value = "task-3507504" [ 611.987360] env[69475]: _type = "Task" [ 611.987360] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.005724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.005724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.016455] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507504, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.026074] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298b767-bd1e-a23b-964a-a3861151a44b, 'name': SearchDatastore_Task, 'duration_secs': 0.055354} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.026558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.026558] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4465f156-09cc-4eba-90e4-be76f3010363/4465f156-09cc-4eba-90e4-be76f3010363.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 612.026852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.027050] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 612.027308] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43e8480c-c45a-43ec-a357-7fa94ae1396c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.031935] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1023b953-ff4a-47c0-850b-8a331f3f76ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.041842] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 612.041842] env[69475]: value = "task-3507505" [ 612.041842] env[69475]: _type = "Task" [ 612.041842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.045307] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 612.047511] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 612.047511] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29ec3eba-8a71-4410-a5dd-ebf5e92c9a49 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.053242] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507505, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.056345] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 612.056345] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b90537-50af-5008-091f-d6016c1814c2" [ 612.056345] env[69475]: _type = "Task" [ 612.056345] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.064858] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b90537-50af-5008-091f-d6016c1814c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.270664] env[69475]: DEBUG nova.scheduler.client.report [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 612.276035] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507503, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.380912] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Successfully created port: 73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.411068] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 612.417360] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507502, 'name': CreateVM_Task, 'duration_secs': 0.543096} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.420065] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 612.420065] env[69475]: DEBUG oslo_vmware.service [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d077c5f-b03c-4b5c-9826-828ff2bef059 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.427432] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.427578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.428432] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 612.428432] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e73f022-626d-4c1d-872c-f23cd39e5e39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.434171] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting 
for the task: (returnval){ [ 612.434171] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d59547-4908-decc-cb2c-f988c241d276" [ 612.434171] env[69475]: _type = "Task" [ 612.434171] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.448851] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d59547-4908-decc-cb2c-f988c241d276, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.458076] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:35:30Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1854059945',id=27,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-542614626',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.458309] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 612.458458] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 612.458680] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 612.458833] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 612.459491] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 612.459491] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 612.459491] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 612.459491] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 612.459652] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 612.459876] env[69475]: DEBUG nova.virt.hardware [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 612.460813] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816c7365-a530-43f3-b349-cc8abfa51a6d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.471722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300f958f-efff-4efe-ae4b-ba1b608df979 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.493166] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Releasing lock "refresh_cache-e1ecc905-22da-434a-8ddf-a66f88ab47fb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.493466] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Received event network-vif-plugged-e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 612.493893] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquiring lock "4465f156-09cc-4eba-90e4-be76f3010363-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.493965] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Lock "4465f156-09cc-4eba-90e4-be76f3010363-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.495367] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Lock "4465f156-09cc-4eba-90e4-be76f3010363-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.495367] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] No waiting events found dispatching network-vif-plugged-e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 612.495367] env[69475]: WARNING nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Received unexpected event network-vif-plugged-e1445b37-7f07-4058-88cb-07a6189aa684 for instance with vm_state building and task_state spawning. [ 612.495367] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Received event network-changed-e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 612.495367] env[69475]: DEBUG nova.compute.manager [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Refreshing instance network info cache due to event network-changed-e1445b37-7f07-4058-88cb-07a6189aa684. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 612.495709] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquiring lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.495709] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Acquired lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.495709] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Refreshing network info cache for port e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 612.510917] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 612.514065] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507504, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08063} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.516408] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.516790] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f71b12-5667-4f05-bc27-89bd9ad7cfd8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.573921] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 48bc79bc-df56-4523-808f-a71b391062b9/48bc79bc-df56-4523-808f-a71b391062b9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.583954] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b4ea1e0-0e42-469f-ba37-c4adc4dd668e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.610050] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507505, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.612964] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 612.612964] env[69475]: value = "task-3507506" [ 612.612964] env[69475]: _type = "Task" [ 612.612964] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.620749] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b90537-50af-5008-091f-d6016c1814c2, 'name': SearchDatastore_Task, 'duration_secs': 0.009391} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.622495] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81ebc76f-948b-4dfa-a619-a3fec138f468 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.630244] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507506, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.631815] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 612.631815] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5f980-d1d5-5e9f-fcd8-4cb5ef397ec8" [ 612.631815] env[69475]: _type = "Task" [ 612.631815] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.642841] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5f980-d1d5-5e9f-fcd8-4cb5ef397ec8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.766578] env[69475]: DEBUG oslo_vmware.api [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507503, 'name': PowerOnVM_Task, 'duration_secs': 0.575636} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.766855] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 612.767049] env[69475]: INFO nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Took 14.86 seconds to spawn the instance on the hypervisor. [ 612.767226] env[69475]: DEBUG nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.768077] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18927f31-7cb8-4774-b8bb-c29a2f08aea5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.778633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.779140] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 612.784263] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.854s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.785346] env[69475]: INFO nova.compute.claims [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.947519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 612.947519] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 612.947519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.947519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.947729] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 612.947729] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6478fd37-7697-49b5-afff-1fbe0c9fc4f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.961579] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 612.961579] env[69475]: 
DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 612.962453] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139b7952-8354-46ea-b864-1ab290582295 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.969337] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2636f1f6-f0a9-46a1-aad3-66b64bfcae5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.974276] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 612.974276] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e0f2e8-9c1c-5ce7-b4cd-baf09e7cb5d8" [ 612.974276] env[69475]: _type = "Task" [ 612.974276] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.981720] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e0f2e8-9c1c-5ce7-b4cd-baf09e7cb5d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.045799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.065519] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507505, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60345} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.065778] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4465f156-09cc-4eba-90e4-be76f3010363/4465f156-09cc-4eba-90e4-be76f3010363.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 613.066313] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 613.066313] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f77fc3b0-5c25-46b5-ab46-e8664042f717 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.072865] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 613.072865] env[69475]: value = "task-3507507" [ 613.072865] env[69475]: _type = "Task" [ 613.072865] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.085223] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507507, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.100823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Acquiring lock "dc2614b1-95b8-4887-8ca6-efe92921c926" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.101118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.101313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Acquiring lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.101510] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.101709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.104196] env[69475]: INFO nova.compute.manager [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Terminating instance [ 613.123348] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.142222] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5f980-d1d5-5e9f-fcd8-4cb5ef397ec8, 'name': SearchDatastore_Task, 'duration_secs': 0.01605} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.142451] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.142664] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 7be48799-ea4a-4e7f-95c2-637460596cfc/7be48799-ea4a-4e7f-95c2-637460596cfc.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.142907] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40b43079-d511-4e0d-a8b5-2a72c26bbbbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.149066] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 613.149066] env[69475]: value = "task-3507508" [ 613.149066] env[69475]: _type = "Task" [ 613.149066] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.157943] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507508, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.158332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.158995] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.158995] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.159158] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.159943] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.162332] env[69475]: INFO nova.compute.manager [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Terminating instance [ 613.286425] env[69475]: INFO nova.compute.manager [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Took 25.25 seconds to build instance. [ 613.293882] env[69475]: DEBUG nova.compute.utils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 613.296816] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.297104] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.328662] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Updated VIF entry in instance network info cache for port e1445b37-7f07-4058-88cb-07a6189aa684. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 613.329225] env[69475]: DEBUG nova.network.neutron [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Updating instance_info_cache with network_info: [{"id": "e1445b37-7f07-4058-88cb-07a6189aa684", "address": "fa:16:3e:55:90:7c", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.128", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1445b37-7f", "ovs_interfaceid": "e1445b37-7f07-4058-88cb-07a6189aa684", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.428838] env[69475]: DEBUG nova.policy [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '156a0d2281654a8c85c6580ea18091a5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2b8dc36bed3a4bf5ae76fbc0c3c252a9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 613.487042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 613.487331] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d 
tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Creating directory with path [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.487573] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01635e53-4a90-4673-a100-55d4775a4dd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.502628] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Created directory with path [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.503826] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Fetch image to [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 613.503826] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Downloading image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk on the data store datastore2 {{(pid=69475) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 613.504850] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8970d636-fe14-494d-823d-6b91e6e6741d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.514667] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f41e57-190e-4576-8617-2c29197b0838 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.526864] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd83391-0153-41e8-874b-319459ccab7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.568159] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ffe7b9-c00e-4c68-933e-98dd7bae8ee6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.579921] env[69475]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8bcf7690-856e-4c55-b3c3-61de9e8935f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.588451] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 
tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507507, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153431} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.588805] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 613.589759] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb93a82e-e56a-4a1e-9d06-392eb8ffaeab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.617119] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] 4465f156-09cc-4eba-90e4-be76f3010363/4465f156-09cc-4eba-90e4-be76f3010363.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 613.617950] env[69475]: DEBUG nova.compute.manager [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.618360] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.618575] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24c9eb62-5ed9-4c4c-be90-47b246939156 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.637388] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca9260b-e78a-4cad-8243-fa1655a4a659 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.650573] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.655177] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3476e9f-a656-4ddd-b351-f7f9354dfb65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.657277] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 
tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 613.657277] env[69475]: value = "task-3507509" [ 613.657277] env[69475]: _type = "Task" [ 613.657277] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.669780] env[69475]: DEBUG nova.compute.manager [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.670060] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.670389] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507508, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.670636] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Waiting for the task: (returnval){ [ 613.670636] env[69475]: value = "task-3507510" [ 613.670636] env[69475]: _type = "Task" [ 613.670636] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.676316] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ae973b-f963-4222-b8ac-c5ab02df9e5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.679705] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507506, 'name': ReconfigVM_Task, 'duration_secs': 0.939777} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.685631] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 48bc79bc-df56-4523-808f-a71b391062b9/48bc79bc-df56-4523-808f-a71b391062b9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.688809] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd0319b3-74ad-4024-9b83-0650209fa3f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.691163] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Downloading image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to the data store datastore2 {{(pid=69475) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 613.700026] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.700158] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507509, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.700799] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-310b6812-821b-494c-8623-dad445973944 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.706551] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Task: {'id': task-3507510, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.711434] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 613.711434] env[69475]: value = "task-3507511" [ 613.711434] env[69475]: _type = "Task" [ 613.711434] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.715987] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 613.715987] env[69475]: value = "task-3507512" [ 613.715987] env[69475]: _type = "Task" [ 613.715987] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.724475] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507511, 'name': Rename_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.733257] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507512, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.777266] env[69475]: DEBUG oslo_vmware.rw_handles [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 613.835591] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07f0a9a9-808f-452a-aa4a-23a6e35a1551 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.839s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.836083] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 613.843447] env[69475]: DEBUG oslo_concurrency.lockutils [req-7d51335a-242c-4b14-8ef4-dbc02b5ef926 req-137034fb-761b-4b86-a3be-d9d3ad603936 service nova] Releasing lock "refresh_cache-4465f156-09cc-4eba-90e4-be76f3010363" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.959415] env[69475]: DEBUG nova.compute.manager [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Received event network-changed-17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.960730] env[69475]: DEBUG nova.compute.manager [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Refreshing instance network info cache due to event network-changed-17befd20-3223-44d2-87ad-fed58da5076e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 613.962307] env[69475]: DEBUG oslo_concurrency.lockutils [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] Acquiring lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.962307] env[69475]: DEBUG oslo_concurrency.lockutils [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] Acquired lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.962307] env[69475]: DEBUG nova.network.neutron [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Refreshing network info cache for port 17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 613.990568] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Received event network-vif-plugged-56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.990568] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquiring lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.990568] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 613.990568] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 613.990568] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] No waiting events found dispatching network-vif-plugged-56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 613.991151] env[69475]: WARNING nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Received unexpected event network-vif-plugged-56faa0c7-80a1-46f5-8167-4485e94846ea for instance with vm_state building and task_state spawning. 
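[Annotation] The "Acquiring"/"Acquired"/"Released" lock lines and the "progress is N%" / "completed successfully" task lines that recur throughout this trace come from oslo.concurrency's named locks and oslo.vmware's task polling. As a reading aid only, below is a minimal, hedged sketch of those two patterns; it is not Nova's or oslo.vmware's actual code, and names such as fake_task_progress and do_critical_work are purely illustrative.

# Sketch of the two patterns visible in the log above (illustrative, not Nova code):
#   1) an oslo.concurrency named lock around a critical section
#   2) a wait_for_task-style poll loop that logs progress until the task finishes
import time

from oslo_concurrency import lockutils  # lockutils.lock() is a real context manager


def do_critical_work():
    """Placeholder for work done while the named lock is held (hypothetical)."""
    time.sleep(0.1)


def fake_task_progress():
    """Yield progress values the way a vCenter task might report them (illustrative)."""
    for pct in (0, 25, 66, 100):
        yield pct


def wait_for_task(progress_iter, poll_interval=0.5):
    """Poll until the 'task' reports 100%, mimicking the log's _poll_task messages."""
    for pct in progress_iter:
        print(f"Task progress is {pct}%.")
        if pct >= 100:
            print("Task completed successfully.")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Named lock, analogous to the Lock "compute_resources" entries in the log.
    with lockutils.lock("compute_resources"):
        do_critical_work()
    # Poll loop, analogous to the CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries.
    wait_for_task(fake_task_progress(), poll_interval=0.1)

[End annotation]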
[ 613.991151] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Received event network-changed-56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 613.991151] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Refreshing instance network info cache due to event network-changed-56faa0c7-80a1-46f5-8167-4485e94846ea. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 613.991151] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquiring lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.991151] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquired lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 613.991400] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Refreshing network info cache for port 56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.166655] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.731084} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.170303] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 7be48799-ea4a-4e7f-95c2-637460596cfc/7be48799-ea4a-4e7f-95c2-637460596cfc.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.170303] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.170303] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7609ec8-0488-4d58-8388-adb155c83323 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.176064] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6db47249-18e7-4943-b280-caa478a78d30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.178569] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507509, 'name': ReconfigVM_Task, 'duration_secs': 0.469796} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.182430] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Reconfigured VM instance instance-00000005 to attach disk [datastore1] 4465f156-09cc-4eba-90e4-be76f3010363/4465f156-09cc-4eba-90e4-be76f3010363.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 614.183996] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-816b248b-c180-4c7c-82c6-cf18471c261a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.196638] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 614.196638] env[69475]: value = "task-3507513" [ 614.196638] env[69475]: _type = "Task" [ 614.196638] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.198714] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261cf0c9-ebb7-473c-9522-050d511fda86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.212222] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 614.212222] env[69475]: value = "task-3507514" [ 614.212222] env[69475]: _type = "Task" [ 614.212222] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.212496] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Task: {'id': task-3507510, 'name': PowerOffVM_Task, 'duration_secs': 0.335984} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.216518] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.218201] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.224367] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c4562b6-66a0-4e76-9cb3-83797696ac59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.259934] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507513, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.267673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bab2554-7d7a-4e57-ae13-d2c90c978ef4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.283567] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507514, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.283915] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507511, 'name': Rename_Task, 'duration_secs': 0.316728} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.285139] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.288899] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-deb239ef-d5b2-477b-acd2-0fe04cdd62c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.291385] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507512, 'name': PowerOffVM_Task, 'duration_secs': 0.339676} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.294223] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.294403] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.298436] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32325168-a899-4b6e-911a-6540bdf788d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.301919] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5163823d-09fa-48c6-90f5-de9b167b40a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.308168] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 614.308168] env[69475]: value = "task-3507516" [ 614.308168] env[69475]: _type = "Task" [ 614.308168] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.321496] env[69475]: DEBUG nova.compute.provider_tree [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.329288] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.334185] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.334185] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.334185] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Deleting the datastore file [datastore1] dc2614b1-95b8-4887-8ca6-efe92921c926 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.334785] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-631fd073-d55a-41f9-91fb-6f25c02e4535 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.342621] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Waiting for the task: (returnval){ [ 614.342621] env[69475]: value = "task-3507518" [ 614.342621] env[69475]: _type = "Task" [ 614.342621] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.359403] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Task: {'id': task-3507518, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.382082] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.382082] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.382082] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Deleting the datastore file [datastore1] ec7a6b3c-2a2f-4edd-8b79-ba55551d6159 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.382386] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7058fd9f-1a3e-47d7-a067-db2787a9a528 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.392031] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for the task: (returnval){ [ 614.392031] env[69475]: value = "task-3507519" [ 614.392031] env[69475]: _type = "Task" [ 614.392031] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.405101] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507519, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.451092] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Successfully created port: 267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.465104] env[69475]: DEBUG oslo_vmware.rw_handles [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 614.465283] env[69475]: DEBUG oslo_vmware.rw_handles [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 614.615444] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Downloaded image file data afa9d32c-9f39-44fb-bf3b-50d35842a59f to vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk on the data store datastore2 {{(pid=69475) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 614.616161] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 614.616404] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Copying Virtual Disk [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk to [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.616680] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1ba408b-cc3f-43c5-9a74-9331f1333d99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.628102] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 614.628102] env[69475]: value = "task-3507520" [ 614.628102] env[69475]: _type = "Task" [ 614.628102] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.637931] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507520, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.700171] env[69475]: DEBUG nova.network.neutron [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Updated VIF entry in instance network info cache for port 17befd20-3223-44d2-87ad-fed58da5076e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 614.700532] env[69475]: DEBUG nova.network.neutron [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Updating instance_info_cache with network_info: [{"id": "17befd20-3223-44d2-87ad-fed58da5076e", "address": "fa:16:3e:64:54:e8", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.243", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap17befd20-32", "ovs_interfaceid": "17befd20-3223-44d2-87ad-fed58da5076e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.714154] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507513, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086339} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.714154] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 614.714154] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac54365-9626-4318-be45-8255e8131963 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.729189] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507514, 'name': Rename_Task, 'duration_secs': 0.171129} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.742368] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 614.752806] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 7be48799-ea4a-4e7f-95c2-637460596cfc/7be48799-ea4a-4e7f-95c2-637460596cfc.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 614.753350] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-50d90228-bb18-4c53-bd3c-3eaa212cfc25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.755521] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-27ad2e45-3677-44eb-b670-e66c703639a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.777098] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 614.777098] env[69475]: value = "task-3507522" [ 614.777098] env[69475]: _type = "Task" [ 614.777098] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.779769] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 614.779769] env[69475]: value = "task-3507521" [ 614.779769] env[69475]: _type = "Task" [ 614.779769] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.793310] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507522, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.796605] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507521, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.807240] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Successfully updated port: 73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 614.821991] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.825933] env[69475]: DEBUG nova.scheduler.client.report [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 614.854193] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 614.864397] env[69475]: DEBUG oslo_vmware.api [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Task: {'id': task-3507518, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18914} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.864713] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.864949] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.865019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.865183] env[69475]: INFO nova.compute.manager [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Took 1.25 seconds to destroy the instance on the hypervisor. [ 614.865422] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.865606] env[69475]: DEBUG nova.compute.manager [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.865698] env[69475]: DEBUG nova.network.neutron [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.887864] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 614.888197] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 614.888403] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 614.888617] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 614.888798] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 614.889669] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 614.889669] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 614.889669] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 614.889669] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 614.889669] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 614.890486] env[69475]: DEBUG nova.virt.hardware [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 614.892364] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5857356-af04-4864-84b9-e0b88e428601 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.903174] env[69475]: DEBUG oslo_vmware.api [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Task: {'id': task-3507519, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167155} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.905326] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.905582] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.905853] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.906103] env[69475]: INFO nova.compute.manager [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Took 1.24 seconds to destroy the instance on the hypervisor. 
[ 614.906385] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.906677] env[69475]: DEBUG nova.compute.manager [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.906810] env[69475]: DEBUG nova.network.neutron [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.912808] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4978fb6-3e2e-4514-bd9c-17fa6453e812 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.139213] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507520, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.203242] env[69475]: DEBUG oslo_concurrency.lockutils [req-238a3ec5-26cd-46d6-a73c-b862bb45b212 req-03365070-5057-4034-ba95-743740c0deb6 service nova] Releasing lock "refresh_cache-48bc79bc-df56-4523-808f-a71b391062b9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.293987] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507522, 'name': ReconfigVM_Task, 'duration_secs': 0.409851} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.301332] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 7be48799-ea4a-4e7f-95c2-637460596cfc/7be48799-ea4a-4e7f-95c2-637460596cfc.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.301332] env[69475]: DEBUG oslo_vmware.api [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507521, 'name': PowerOnVM_Task, 'duration_secs': 0.500545} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.301332] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d658d510-423d-4483-b0d8-f05fb7694939 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.302955] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 615.303095] env[69475]: INFO nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Took 15.11 seconds to spawn the instance on the hypervisor. [ 615.303522] env[69475]: DEBUG nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 615.304745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0dd6d9f-d573-494e-863a-9a7b26f0269a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.309574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.309651] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.311232] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.314442] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 615.314442] env[69475]: value = "task-3507523" [ 615.314442] env[69475]: _type = "Task" [ 615.314442] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.336054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.336702] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 615.339555] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507516, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.343604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.626s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.346401] env[69475]: INFO nova.compute.claims [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 615.349920] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507523, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.462056] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updated VIF entry in instance network info cache for port 56faa0c7-80a1-46f5-8167-4485e94846ea. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 615.463999] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updating instance_info_cache with network_info: [{"id": "56faa0c7-80a1-46f5-8167-4485e94846ea", "address": "fa:16:3e:60:39:c6", "network": {"id": "57d3fe81-db66-46d5-ba0f-eec2a4cfd7cb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1825674224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d7ffc7ecd34b918b1998fdc97c3425", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56faa0c7-80", "ovs_interfaceid": "56faa0c7-80a1-46f5-8167-4485e94846ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.639650] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507520, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.835976] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507516, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.842506] env[69475]: INFO nova.compute.manager [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Took 27.37 seconds to build instance. [ 615.850327] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507523, 'name': Rename_Task, 'duration_secs': 0.213588} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.853836] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 615.855089] env[69475]: DEBUG nova.compute.utils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 615.856754] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72bb08da-5e76-47da-86db-abe04a145179 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.860070] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 615.867981] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 615.867981] env[69475]: value = "task-3507524" [ 615.867981] env[69475]: _type = "Task" [ 615.867981] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.882123] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507524, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.894424] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.965544] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Releasing lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 615.967838] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Received event network-vif-plugged-806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.967838] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.967838] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 615.967838] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.967838] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] No waiting events found dispatching network-vif-plugged-806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 615.968468] env[69475]: WARNING nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Received unexpected event network-vif-plugged-806e8096-632b-4993-a27c-3eb4767e9d00 for instance with vm_state building and task_state spawning. [ 615.968468] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Received event network-changed-806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.968468] env[69475]: DEBUG nova.compute.manager [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Refreshing instance network info cache due to event network-changed-806e8096-632b-4993-a27c-3eb4767e9d00. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 615.968468] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquiring lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.968468] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Acquired lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.968640] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Refreshing network info cache for port 806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.145368] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507520, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.448009} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.145368] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Copied Virtual Disk [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk to [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.145368] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleting the datastore file [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f/tmp-sparse.vmdk {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 616.145368] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f28997f9-c3a0-4545-a5ee-68a5fb8ca54a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.152604] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 616.152604] env[69475]: value = "task-3507525" [ 616.152604] env[69475]: _type = "Task" [ 616.152604] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.166665] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.214242] env[69475]: DEBUG nova.network.neutron [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updating instance_info_cache with network_info: [{"id": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "address": "fa:16:3e:f3:5c:c6", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9904f-d8", "ovs_interfaceid": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.299134] env[69475]: DEBUG nova.network.neutron [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.313555] env[69475]: DEBUG nova.network.neutron [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.331159] env[69475]: DEBUG oslo_vmware.api [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507516, 'name': PowerOnVM_Task, 'duration_secs': 1.684511} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.331428] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.331626] env[69475]: INFO nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Took 13.84 seconds to spawn the instance on the hypervisor. [ 616.331803] env[69475]: DEBUG nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 616.332846] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a68c236d-7f54-4317-be22-fee838222f29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.354409] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fa6b94ee-3070-42f6-a9d5-4323e1e426f2 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.890s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.354639] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.355167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.359882] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 616.378993] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507524, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.596583] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7b9efc-91b6-415d-b236-e3a454670199 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.604466] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113b5fcc-9d54-426c-93f2-552c7706630e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.637821] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f610db4a-2309-44f7-b4ad-a8cace9691d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.646013] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2054fb3b-fb30-4af6-ab84-a595df3a521b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.663716] env[69475]: DEBUG nova.compute.provider_tree [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 616.670291] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037494} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.670580] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 616.670812] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Moving file from [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6/afa9d32c-9f39-44fb-bf3b-50d35842a59f to [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f. 
{{(pid=69475) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 616.671558] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-b78efeb8-f831-4324-ac9a-258cdd18cd25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.678233] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 616.678233] env[69475]: value = "task-3507526" [ 616.678233] env[69475]: _type = "Task" [ 616.678233] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.689373] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507526, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.718095] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.718310] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Instance network_info: |[{"id": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "address": "fa:16:3e:f3:5c:c6", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9904f-d8", "ovs_interfaceid": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 616.718738] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:5c:c6', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '73a9904f-d8b5-4a55-8338-3f26cce4f9f7', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 616.726327] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Creating folder: Project (947a74cfc69b45dbb3aa09060c5b76f4). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.726589] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-566a0880-7646-41d7-8fe3-fecbf9399ae5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.737437] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Created folder: Project (947a74cfc69b45dbb3aa09060c5b76f4) in parent group-v700823. [ 616.737631] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Creating folder: Instances. Parent ref: group-v700848. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 616.737865] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-583b6da5-4724-4238-a16e-39d4c453d0c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.746572] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Created folder: Instances in parent group-v700848. [ 616.746752] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 616.747207] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 616.747417] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d01ea8e6-bd41-4609-95bf-218024fdeb50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.767022] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.767022] env[69475]: value = "task-3507529" [ 616.767022] env[69475]: _type = "Task" [ 616.767022] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.774880] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507529, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.802261] env[69475]: INFO nova.compute.manager [-] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Took 1.94 seconds to deallocate network for instance. [ 616.820342] env[69475]: INFO nova.compute.manager [-] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Took 1.91 seconds to deallocate network for instance. [ 616.857718] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.860895] env[69475]: INFO nova.compute.manager [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Took 27.80 seconds to build instance. [ 616.883340] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507524, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.910534] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updated VIF entry in instance network info cache for port 806e8096-632b-4993-a27c-3eb4767e9d00. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 616.910930] env[69475]: DEBUG nova.network.neutron [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating instance_info_cache with network_info: [{"id": "806e8096-632b-4993-a27c-3eb4767e9d00", "address": "fa:16:3e:db:bb:e6", "network": {"id": "1d7c82a8-9307-4c3e-938e-03ec482c5ac1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1922599493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25e8170617f6470dbcf8c36752c83214", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap806e8096-63", "ovs_interfaceid": "806e8096-632b-4993-a27c-3eb4767e9d00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.190359] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 
tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507526, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.056195} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.191579] env[69475]: ERROR nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [req-a92815db-1ae5-41db-b7a4-beb5f730a7e5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a92815db-1ae5-41db-b7a4-beb5f730a7e5"}]} [ 617.192053] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] File moved {{(pid=69475) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 617.192453] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Cleaning up location [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 617.192642] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleting the datastore file [datastore2] vmware_temp/19cd01f2-3bb9-4c6c-a489-8cc821947ed6 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 617.195359] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9af68668-a52c-412c-94b3-82c57b6362e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.206742] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 617.206742] env[69475]: value = "task-3507530" [ 617.206742] env[69475]: _type = "Task" [ 617.206742] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.220592] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507530, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.223513] env[69475]: DEBUG nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 617.248350] env[69475]: DEBUG nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 617.248603] env[69475]: DEBUG nova.compute.provider_tree [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.262693] env[69475]: DEBUG nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 617.277475] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507529, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.288289] env[69475]: DEBUG nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 617.309637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.326580] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.331119] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Successfully updated port: 267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 617.367435] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc2bd11a-bc55-4f3f-a65c-acea89baf1e7 tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.319s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 617.375215] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 617.385754] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.389471] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507524, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.412580] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.412856] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 617.412977] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 617.413216] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 617.413389] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 617.413575] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 617.413834] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 617.414414] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 617.414697] env[69475]: DEBUG nova.virt.hardware [None 
req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 617.415389] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 617.416072] env[69475]: DEBUG nova.virt.hardware [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 617.417035] env[69475]: DEBUG oslo_concurrency.lockutils [req-496e43a5-3351-4377-9eb2-24b06c35eb1d req-c5ef3b9b-bdcb-4f82-882e-c6a62644ee59 service nova] Releasing lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.419035] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb99e7a-8fba-4c2c-947e-c4a0ce7de13a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.427701] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a01f4bc-b6e2-4d33-8492-203202c663eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.443747] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.449470] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Creating folder: Project (867c3c675ee346a5a09ef6f082784e33). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.452432] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2105dd52-0f90-4440-89fe-641d0f8ca677 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.463339] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Created folder: Project (867c3c675ee346a5a09ef6f082784e33) in parent group-v700823. [ 617.463544] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Creating folder: Instances. Parent ref: group-v700851. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 617.463799] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-33d36026-4624-4b38-b85b-8c1f3a4f8229 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.473413] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Created folder: Instances in parent group-v700851. [ 617.473655] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 617.473845] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 617.474061] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba40a5f5-8177-4489-932e-0b49a726dc55 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.493061] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.493061] env[69475]: value = "task-3507533" [ 617.493061] env[69475]: _type = "Task" [ 617.493061] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.500748] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507533, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.567213] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073d0531-5cdf-4e31-a6db-7e45880de8b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.574581] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b25be8-6ed9-4f60-9c3c-8b90ec6d8eb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.606417] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a2c28c-c5eb-432d-89a3-31cce11953fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.614593] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1648810-d591-496b-aea8-370d41065539 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.628453] env[69475]: DEBUG nova.compute.provider_tree [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 617.716373] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507530, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035954} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.716627] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 617.717488] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1baf6d6c-c3cc-455a-b338-57ccfa2fa9da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.722653] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 617.722653] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5293892a-da75-5092-96dd-2cc30fd35149" [ 617.722653] env[69475]: _type = "Task" [ 617.722653] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.730778] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5293892a-da75-5092-96dd-2cc30fd35149, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.777926] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507529, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.834830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.835048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquired lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.835255] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 617.884857] env[69475]: DEBUG oslo_vmware.api [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3507524, 'name': PowerOnVM_Task, 'duration_secs': 1.616046} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.885259] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 617.885556] env[69475]: INFO nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Took 13.15 seconds to spawn the instance on the hypervisor. 
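The PowerOnVM_Task records above show the usual oslo.vmware pattern behind these log lines: a vCenter task is issued through the API session, then wait_for_task polls it, emitting the "progress is 66%/89%" debug records until vCenter reports completion. The following is only a minimal sketch of that pattern, not Nova's own vm_util code; the host, credentials and vm_ref are placeholders, and the constructor keyword names are taken from oslo.vmware's VMwareAPISession API.

    from oslo_vmware import api as vmware_api

    def power_on_and_wait(vcenter_host, user, password, vm_ref):
        # Placeholder session setup; in a real service the session is created
        # once and reused for every task.
        session = vmware_api.VMwareAPISession(vcenter_host, user, password,
                                              api_retry_count=10,
                                              task_poll_interval=0.5)
        # Issue the vCenter task, then poll it to completion. wait_for_task
        # produces the "PowerOnVM_Task ... progress is N%" debug lines seen in
        # this log and raises if the task ends in error.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task)

The same invoke_api/wait_for_task pair underlies the CreateVM_Task, CopyVirtualDisk_Task and DeleteDatastoreFile_Task polling records elsewhere in this section.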
[ 617.885773] env[69475]: DEBUG nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 617.886716] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a01e12-aaf3-4310-bc2d-487e2856097a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.005456] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507533, 'name': CreateVM_Task, 'duration_secs': 0.474771} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.005613] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.006698] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.006934] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.007271] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.007670] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8627ff7a-ce01-4fea-ad22-7c4d85196e28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.013068] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 618.013068] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b8a37b-7610-0ba6-8ed0-d8e70e885fc5" [ 618.013068] env[69475]: _type = "Task" [ 618.013068] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.023376] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b8a37b-7610-0ba6-8ed0-d8e70e885fc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.169852] env[69475]: DEBUG nova.scheduler.client.report [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 18 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 618.170134] env[69475]: DEBUG nova.compute.provider_tree [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 18 to 19 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 618.170316] env[69475]: DEBUG nova.compute.provider_tree [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 618.234072] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5293892a-da75-5092-96dd-2cc30fd35149, 'name': SearchDatastore_Task, 'duration_secs': 0.011006} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.234339] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.234589] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de/9cfd8425-c1aa-4dbc-afa4-3a5aa10428de.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 618.234839] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a80d1d1d-d67d-4253-838a-57c45c1a2f45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.242549] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 618.242549] env[69475]: value = "task-3507534" [ 618.242549] env[69475]: _type = "Task" [ 618.242549] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.254496] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507534, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.279075] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507529, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.408162] env[69475]: INFO nova.compute.manager [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Took 25.20 seconds to build instance. [ 618.526967] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b8a37b-7610-0ba6-8ed0-d8e70e885fc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011103} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.527557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.527796] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.528153] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.528377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.528660] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 618.528999] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b92eeb6-d438-436c-977f-3eff8c0c44be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.543857] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 618.544081] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 618.544963] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b6db92c-11af-4651-a164-48fe6d0292f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.554653] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 618.554653] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5278db57-200e-cbcd-889a-58d50d26c08f" [ 618.554653] env[69475]: _type = "Task" [ 618.554653] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.565069] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5278db57-200e-cbcd-889a-58d50d26c08f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.593280] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.675712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.332s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.678052] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 618.680023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.634s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.681802] env[69475]: INFO nova.compute.claims [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 618.761361] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507534, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.785597] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507529, 'name': CreateVM_Task, 'duration_secs': 1.581114} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.785597] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 618.785597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.785597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.785597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 618.785805] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83f4de31-fafe-4b66-a89c-f4f99f435433 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.789705] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 618.789705] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dd115-9487-c618-884d-b541bace7674" [ 618.789705] env[69475]: _type = "Task" [ 618.789705] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.801495] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dd115-9487-c618-884d-b541bace7674, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.883496] env[69475]: DEBUG nova.network.neutron [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updating instance_info_cache with network_info: [{"id": "267ce176-9932-4001-a96f-4e89c511fca3", "address": "fa:16:3e:58:e7:62", "network": {"id": "b99233c7-e1eb-4d5b-ba20-9795ba71b047", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-227500057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b8dc36bed3a4bf5ae76fbc0c3c252a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ce176-99", "ovs_interfaceid": "267ce176-9932-4001-a96f-4e89c511fca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.904117] env[69475]: DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Received event network-vif-plugged-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.904372] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Acquiring lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 618.904734] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 618.904798] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 618.904951] env[69475]: DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] No waiting events found dispatching network-vif-plugged-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 618.905115] env[69475]: WARNING nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Received unexpected event network-vif-plugged-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 for instance with vm_state building and task_state spawning. [ 618.905302] env[69475]: DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Received event network-changed-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 618.905504] env[69475]: DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Refreshing instance network info cache due to event network-changed-73a9904f-d8b5-4a55-8338-3f26cce4f9f7. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 618.905690] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Acquiring lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.905943] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Acquired lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.910028] env[69475]: DEBUG nova.network.neutron [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Refreshing network info cache for port 73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 618.912331] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5de0e2-1d8f-453a-a7a1-3453714093f7 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.713s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.063216] env[69475]: DEBUG nova.compute.manager [None req-af7b8871-79e6-4f2e-aa98-b691572a2264 tempest-ServerDiagnosticsTest-2088354602 tempest-ServerDiagnosticsTest-2088354602-project-admin] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.064784] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0eafeaa-fde1-4f2e-937a-e2f38172a733 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.071536] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': 
session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5278db57-200e-cbcd-889a-58d50d26c08f, 'name': SearchDatastore_Task, 'duration_secs': 0.051741} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.072536] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03c36d2b-4aee-4ebe-9584-037baffdfc7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.077817] env[69475]: INFO nova.compute.manager [None req-af7b8871-79e6-4f2e-aa98-b691572a2264 tempest-ServerDiagnosticsTest-2088354602 tempest-ServerDiagnosticsTest-2088354602-project-admin] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Retrieving diagnostics [ 619.079031] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1393ec6a-3dec-48de-84b9-607bc781d7d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.082817] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 619.082817] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521fa3b7-7822-f09e-2671-f820f452d88c" [ 619.082817] env[69475]: _type = "Task" [ 619.082817] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.122697] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521fa3b7-7822-f09e-2671-f820f452d88c, 'name': SearchDatastore_Task, 'duration_secs': 0.02574} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.123450] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.123904] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 67287947-ecce-4462-8268-23bcc7421766/67287947-ecce-4462-8268-23bcc7421766.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 619.124350] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f764452f-cc89-4340-ae1f-d6a47570f839 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.132348] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 619.132348] env[69475]: value = "task-3507535" [ 619.132348] env[69475]: _type = "Task" [ 619.132348] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.145532] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507535, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.190100] env[69475]: DEBUG nova.compute.utils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 619.191809] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 619.192893] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 619.260205] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507534, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597584} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.260205] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de/9cfd8425-c1aa-4dbc-afa4-3a5aa10428de.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.260205] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.260205] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-20cd9a65-ce31-48e4-90a2-68c936278f05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.266948] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 619.266948] env[69475]: value = "task-3507536" [ 619.266948] env[69475]: _type = "Task" [ 619.266948] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.272148] env[69475]: DEBUG nova.policy [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fc1b162c8b2400c863fcc0b1de0647f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f2d589b9070842fdb8c16179eff3433f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 619.279749] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507536, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.301865] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dd115-9487-c618-884d-b541bace7674, 'name': SearchDatastore_Task, 'duration_secs': 0.029847} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.301865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.301865] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.301865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.302327] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 619.303189] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 619.303189] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-616fe9dc-dc5f-42d6-bd2a-845c464c4e9e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.312473] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 619.312754] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 619.313603] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d1d565c-e208-46ad-9625-2e01139d19e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.319204] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 619.319204] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d86d6-af18-0e91-09a2-31b685f02ca9" [ 619.319204] env[69475]: _type = "Task" [ 619.319204] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.332432] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d86d6-af18-0e91-09a2-31b685f02ca9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.337764] env[69475]: DEBUG nova.compute.manager [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Received event network-vif-plugged-267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 619.337764] env[69475]: DEBUG oslo_concurrency.lockutils [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] Acquiring lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.337764] env[69475]: DEBUG oslo_concurrency.lockutils [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.337764] env[69475]: DEBUG oslo_concurrency.lockutils [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.337764] env[69475]: DEBUG nova.compute.manager [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] No waiting events found dispatching network-vif-plugged-267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 619.337919] env[69475]: WARNING nova.compute.manager [req-ce5efb62-a922-409d-87c3-1d88d084c077 req-0df98a9a-0660-490e-8a2e-1e001eeb144d service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Received unexpected event 
network-vif-plugged-267ce176-9932-4001-a96f-4e89c511fca3 for instance with vm_state building and task_state spawning. [ 619.386392] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Releasing lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.386773] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Instance network_info: |[{"id": "267ce176-9932-4001-a96f-4e89c511fca3", "address": "fa:16:3e:58:e7:62", "network": {"id": "b99233c7-e1eb-4d5b-ba20-9795ba71b047", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-227500057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b8dc36bed3a4bf5ae76fbc0c3c252a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ce176-99", "ovs_interfaceid": "267ce176-9932-4001-a96f-4e89c511fca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 619.387261] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:58:e7:62', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '267ce176-9932-4001-a96f-4e89c511fca3', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.395588] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Creating folder: Project (2b8dc36bed3a4bf5ae76fbc0c3c252a9). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.395956] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f334ed44-e050-46d2-a8f6-cbd00fbce55a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.406501] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Created folder: Project (2b8dc36bed3a4bf5ae76fbc0c3c252a9) in parent group-v700823. [ 619.406737] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Creating folder: Instances. Parent ref: group-v700854. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.406995] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c96b779-a33f-4452-b2ef-4c83bcbee755 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.416416] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Created folder: Instances in parent group-v700854. [ 619.416684] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.420021] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.420021] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2789c8f9-9a79-4339-97bc-39b5f50f9221 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.439129] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.439129] env[69475]: value = "task-3507539" [ 619.439129] env[69475]: _type = "Task" [ 619.439129] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.451026] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507539, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.629893] env[69475]: DEBUG nova.network.neutron [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updated VIF entry in instance network info cache for port 73a9904f-d8b5-4a55-8338-3f26cce4f9f7. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 619.630336] env[69475]: DEBUG nova.network.neutron [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updating instance_info_cache with network_info: [{"id": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "address": "fa:16:3e:f3:5c:c6", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9904f-d8", "ovs_interfaceid": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.643636] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507535, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.697192] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 619.780427] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507536, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.141623} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.780755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 619.781649] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43df8bb-440e-4129-868f-9d2e58a3fb8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.805344] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de/9cfd8425-c1aa-4dbc-afa4-3a5aa10428de.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 619.808967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55d1a74a-0151-4ca3-afe9-09e1cc61dd8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.838665] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d86d6-af18-0e91-09a2-31b685f02ca9, 'name': SearchDatastore_Task, 'duration_secs': 0.01048} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.844068] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 619.844068] env[69475]: value = "task-3507540" [ 619.844068] env[69475]: _type = "Task" [ 619.844068] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.844652] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abb5f3eb-77c8-442d-96ba-8a2193715497 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.860026] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507540, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.862335] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 619.862335] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a4e98a-a065-eaa4-4301-dd4b81c7e23d" [ 619.862335] env[69475]: _type = "Task" [ 619.862335] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.871545] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a4e98a-a065-eaa4-4301-dd4b81c7e23d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.939896] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Successfully created port: d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 619.953027] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507539, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.960605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c59587-c95d-40ec-89e7-80c96a782ef3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.968337] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e540afb0-c8bb-4ad4-bcd9-96a243d02afd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.004891] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bdf549c-10c8-400d-a044-6ede33b4df7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.013162] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5308bb95-7366-4a86-ac0d-b924698d38d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.031101] env[69475]: DEBUG nova.compute.provider_tree [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.132876] env[69475]: DEBUG oslo_concurrency.lockutils [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] Releasing lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.133171] env[69475]: 
DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Received event network-vif-deleted-fd190b86-eed6-4857-9dcb-7fc4a209989d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.133370] env[69475]: DEBUG nova.compute.manager [req-fb14cebd-d373-40b5-8874-c0fc2c817d2f req-7d201eaa-a107-4940-8822-7bf348f12695 service nova] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Received event network-vif-deleted-77ecc630-733d-4acd-8e33-5354c11dd9a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 620.143973] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507535, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.795255} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.144165] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 67287947-ecce-4462-8268-23bcc7421766/67287947-ecce-4462-8268-23bcc7421766.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 620.144381] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 620.144629] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19fd40a0-064e-409e-9ece-3483c580bc05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.151114] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 620.151114] env[69475]: value = "task-3507541" [ 620.151114] env[69475]: _type = "Task" [ 620.151114] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.159859] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507541, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.359261] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507540, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.374638] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a4e98a-a065-eaa4-4301-dd4b81c7e23d, 'name': SearchDatastore_Task, 'duration_secs': 0.043521} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.374839] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.374983] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 25c44ae0-4193-4833-85ec-ebc0ef3cf593/25c44ae0-4193-4833-85ec-ebc0ef3cf593.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 620.375531] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f145bc92-6615-4367-b1b5-e01ca944be03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.384898] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 620.384898] env[69475]: value = "task-3507542" [ 620.384898] env[69475]: _type = "Task" [ 620.384898] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.396839] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507542, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.458626] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507539, 'name': CreateVM_Task, 'duration_secs': 0.655365} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.458888] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.459652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.459832] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.460180] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.460465] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7980edde-104e-42e8-8168-68c613d35f7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.466320] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 620.466320] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e8ee64-a731-f0a4-fcb1-2a3a00a75f54" [ 620.466320] env[69475]: _type = "Task" [ 620.466320] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.476381] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e8ee64-a731-f0a4-fcb1-2a3a00a75f54, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.533693] env[69475]: DEBUG nova.scheduler.client.report [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 620.628444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.628673] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.661030] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507541, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06911} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.661030] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.662382] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596e4fb1-5618-42b8-9468-7237aa3eb5e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.682335] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Reconfiguring VM instance instance-0000000b to attach disk [datastore2] 67287947-ecce-4462-8268-23bcc7421766/67287947-ecce-4462-8268-23bcc7421766.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.682956] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-483fdab3-541f-4b5a-a45c-fe09a3d48f8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.703852] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 620.703852] env[69475]: value = "task-3507543" [ 620.703852] env[69475]: _type = "Task" [ 620.703852] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.708247] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 620.720788] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507543, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.742296] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 620.742537] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 620.742690] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 620.743075] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 620.743075] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 620.743196] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 620.743477] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 620.743642] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 
620.744086] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 620.744086] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 620.744376] env[69475]: DEBUG nova.virt.hardware [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 620.745253] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e322e654-3dc3-427d-8151-06317093b1db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.756112] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-966f1cca-fb10-4caf-b054-b6dd5782b49b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.859152] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507540, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.878230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "235653ac-a893-4f42-a394-dd81f61f0d73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 620.878617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 620.896876] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.981702] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e8ee64-a731-f0a4-fcb1-2a3a00a75f54, 'name': SearchDatastore_Task, 'duration_secs': 0.017825} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.981994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 620.982282] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.982520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.982673] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.982853] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.983209] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-31e2cde4-3c55-4ed7-83fc-c04288c98b39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.001068] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.001259] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.003387] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28a628be-a1c7-4078-9db3-c0bc03e869e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.011021] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 621.011021] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f647f2-bfe5-669a-26c8-b94d17e4b9f0" [ 621.011021] env[69475]: _type = "Task" [ 621.011021] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.019245] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f647f2-bfe5-669a-26c8-b94d17e4b9f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.041527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.361s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 621.042890] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 621.047379] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.738s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.047379] env[69475]: DEBUG nova.objects.instance [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lazy-loading 'resources' on Instance uuid dc2614b1-95b8-4887-8ca6-efe92921c926 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 621.131423] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.216421] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507543, 'name': ReconfigVM_Task, 'duration_secs': 0.288741} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.217186] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Reconfigured VM instance instance-0000000b to attach disk [datastore2] 67287947-ecce-4462-8268-23bcc7421766/67287947-ecce-4462-8268-23bcc7421766.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.217589] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-344de276-220c-437c-afbf-b653a7bf9003 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.228050] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 621.228050] env[69475]: value = "task-3507544" [ 621.228050] env[69475]: _type = "Task" [ 621.228050] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.238787] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507544, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.361483] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507540, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.385304] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 621.400046] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507542, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.523471] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f647f2-bfe5-669a-26c8-b94d17e4b9f0, 'name': SearchDatastore_Task, 'duration_secs': 0.066268} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.524783] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97e7ee9c-b0c8-4322-95d7-9fc30432668a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.533551] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 621.533551] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d88c8e-d61d-8c57-7a85-d53025f5968e" [ 621.533551] env[69475]: _type = "Task" [ 621.533551] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.551910] env[69475]: DEBUG nova.compute.utils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 621.560816] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d88c8e-d61d-8c57-7a85-d53025f5968e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.560816] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 621.563358] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 621.669945] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.724152] env[69475]: DEBUG nova.policy [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82f6c3724a2b4430b8df87655ff91c63', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1073981d0d7740e78805798e02ff9d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 621.742370] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507544, 'name': Rename_Task, 'duration_secs': 0.158948} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.742668] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 621.742917] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-281677dc-9648-4ef4-87c7-9500225a1c42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.755271] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 621.755271] env[69475]: value = "task-3507545" [ 621.755271] env[69475]: _type = "Task" [ 621.755271] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.769488] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507545, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.868413] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507540, 'name': ReconfigVM_Task, 'duration_secs': 1.673114} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.874150] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfigured VM instance instance-00000008 to attach disk [datastore2] 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de/9cfd8425-c1aa-4dbc-afa4-3a5aa10428de.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.874794] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9856290e-5e40-4984-aa6e-d97b0edcbd8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.887545] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 621.887545] env[69475]: value = "task-3507546" [ 621.887545] env[69475]: _type = "Task" [ 621.887545] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.904940] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f375d00a-45e7-4912-95c2-edca7380924a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.928795] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df647af-421b-42e6-9a3e-d6437e4c8af2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.932993] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507542, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.933276] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507546, 'name': Rename_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.934848] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.973076] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88404994-49de-44af-ac63-613b076fa830 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.986063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7bd713-1b26-4bde-ba43-204c476c145f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.008899] env[69475]: DEBUG nova.compute.provider_tree [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 622.048098] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d88c8e-d61d-8c57-7a85-d53025f5968e, 'name': SearchDatastore_Task, 'duration_secs': 0.018198} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.048593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.048733] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b255f4d7-b177-4d6c-8a28-dcb5a179c1c0/b255f4d7-b177-4d6c-8a28-dcb5a179c1c0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.049433] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-692ce92f-c8ad-4387-bc67-67c8a641af9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.061089] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 622.064150] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 622.064150] env[69475]: value = "task-3507547" [ 622.064150] env[69475]: _type = "Task" [ 622.064150] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.077661] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507547, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.135075] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Successfully updated port: d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.269394] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507545, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.326940] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.327246] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.327454] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.327633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.327860] env[69475]: DEBUG oslo_concurrency.lockutils [None
req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.330454] env[69475]: INFO nova.compute.manager [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Terminating instance [ 622.404675] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507546, 'name': Rename_Task, 'duration_secs': 0.37579} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.404675] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 622.404675] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7e7d0106-778b-4a3d-8a76-0868b17d6a25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.413316] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507542, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.75455} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.415491] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 25c44ae0-4193-4833-85ec-ebc0ef3cf593/25c44ae0-4193-4833-85ec-ebc0ef3cf593.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 622.415683] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 622.416029] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 622.416029] env[69475]: value = "task-3507548" [ 622.416029] env[69475]: _type = "Task" [ 622.416029] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.416221] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f5b67e05-f86b-4588-8f3d-ec516fe98426 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.433041] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507548, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.434836] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 622.434836] env[69475]: value = "task-3507549" [ 622.434836] env[69475]: _type = "Task" [ 622.434836] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.448425] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507549, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.512624] env[69475]: DEBUG nova.scheduler.client.report [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 622.585511] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507547, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.640227] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.640227] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquired lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.640464] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.772371] env[69475]: DEBUG oslo_vmware.api [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507545, 'name': PowerOnVM_Task, 'duration_secs': 0.813065} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.772371] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 622.772371] env[69475]: INFO nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Took 5.40 seconds to spawn the instance on the hypervisor. 
[ 622.772371] env[69475]: DEBUG nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.773263] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36f621ce-fd54-4222-b96c-157024f350ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.817650] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Successfully created port: 242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 622.837783] env[69475]: DEBUG nova.compute.manager [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 622.837783] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 622.837783] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f47fa5f-ddab-4239-8233-c152ac55f0c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.848010] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 622.848010] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-43aeb0a7-3565-48b9-b8e8-7e6227404b11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.857099] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 622.857099] env[69475]: value = "task-3507550" [ 622.857099] env[69475]: _type = "Task" [ 622.857099] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.870123] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507550, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.932519] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507548, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.952133] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507549, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139729} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.952537] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.953684] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf40fefa-e7f5-49ec-931e-53a5f54f68da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.984700] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 25c44ae0-4193-4833-85ec-ebc0ef3cf593/25c44ae0-4193-4833-85ec-ebc0ef3cf593.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.986012] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e376b4c-9899-4da2-abee-98da8f96de36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.011764] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 623.011764] env[69475]: value = "task-3507551" [ 623.011764] env[69475]: _type = "Task" [ 623.011764] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.018336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.972s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.027018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.700s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.027398] env[69475]: DEBUG nova.objects.instance [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lazy-loading 'resources' on Instance uuid ec7a6b3c-2a2f-4edd-8b79-ba55551d6159 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 623.029030] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507551, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.079519] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 623.083686] env[69475]: INFO nova.scheduler.client.report [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Deleted allocations for instance dc2614b1-95b8-4887-8ca6-efe92921c926 [ 623.095595] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507547, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.732686} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.095595] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b255f4d7-b177-4d6c-8a28-dcb5a179c1c0/b255f4d7-b177-4d6c-8a28-dcb5a179c1c0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.095595] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.095765] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8a0faf5-5e23-47bb-922c-e02af527ba12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.106128] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 623.106128] env[69475]: value = "task-3507552" [ 623.106128] env[69475]: _type = "Task" [ 623.106128] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.121577] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507552, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.130788] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.131103] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 623.131270] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 623.131452] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 623.131591] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 623.131754] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 623.131971] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 623.132194] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 623.132394] env[69475]: 
DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 623.132558] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 623.132726] env[69475]: DEBUG nova.virt.hardware [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 623.133696] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed844a3b-b24f-44f0-b956-43a793bedab5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.146449] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b50101a-7b43-4a27-9875-c3d5d6e47291 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.281631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.281631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.300282] env[69475]: INFO nova.compute.manager [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Took 13.39 seconds to build instance. [ 623.376090] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507550, 'name': PowerOffVM_Task, 'duration_secs': 0.440825} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.376711] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 623.376711] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 623.376945] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ffbfc6ef-45b7-4c61-80c4-7cb9d4797026 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.431038] env[69475]: DEBUG oslo_vmware.api [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507548, 'name': PowerOnVM_Task, 'duration_secs': 1.000027} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.432292] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 623.433351] env[69475]: INFO nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Took 16.21 seconds to spawn the instance on the hypervisor. [ 623.433351] env[69475]: DEBUG nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 623.433916] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad9132d-e782-4f35-aac4-6c36279d1351 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.464819] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 623.469436] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 623.469923] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 623.469923] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Deleting the datastore file [datastore1] e1ecc905-22da-434a-8ddf-a66f88ab47fb {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 623.470265] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50b17c29-4ee6-410a-923c-12327d143d73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.479289] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for the task: (returnval){ [ 623.479289] env[69475]: value = "task-3507554" [ 623.479289] env[69475]: _type = "Task" [ 623.479289] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.490431] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507554, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.529060] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507551, 'name': ReconfigVM_Task, 'duration_secs': 0.467788} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.532015] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 25c44ae0-4193-4833-85ec-ebc0ef3cf593/25c44ae0-4193-4833-85ec-ebc0ef3cf593.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 623.532868] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-54758c70-347d-47d0-bce8-672ee33203d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.541232] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 623.541232] env[69475]: value = "task-3507555" [ 623.541232] env[69475]: _type = "Task" [ 623.541232] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.551918] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507555, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.605481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ee9157a-1c00-4847-871b-af0df44d6b8c tempest-DeleteServersAdminTestJSON-1366753598 tempest-DeleteServersAdminTestJSON-1366753598-project-admin] Lock "dc2614b1-95b8-4887-8ca6-efe92921c926" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 10.504s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.616043] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507552, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.158331} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.618578] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 623.619457] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d881df-4da2-4ad6-83e3-c76a39f0fda0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.649710] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] b255f4d7-b177-4d6c-8a28-dcb5a179c1c0/b255f4d7-b177-4d6c-8a28-dcb5a179c1c0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 623.655548] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-867fb4b2-c750-476c-890c-e8a9eefe5e73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.679166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "48bc79bc-df56-4523-808f-a71b391062b9" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.682751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.682751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "48bc79bc-df56-4523-808f-a71b391062b9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.682751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 623.682751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e 
tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.683114] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 623.683114] env[69475]: value = "task-3507556" [ 623.683114] env[69475]: _type = "Task" [ 623.683114] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.688653] env[69475]: INFO nova.compute.manager [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Terminating instance [ 623.703707] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.753202] env[69475]: DEBUG nova.compute.manager [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Received event network-changed-267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 623.753425] env[69475]: DEBUG nova.compute.manager [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Refreshing instance network info cache due to event network-changed-267ce176-9932-4001-a96f-4e89c511fca3. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 623.753635] env[69475]: DEBUG oslo_concurrency.lockutils [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] Acquiring lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.753942] env[69475]: DEBUG oslo_concurrency.lockutils [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] Acquired lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 623.753942] env[69475]: DEBUG nova.network.neutron [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Refreshing network info cache for port 267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 623.784796] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 623.794209] env[69475]: DEBUG nova.network.neutron [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Updating instance_info_cache with network_info: [{"id": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "address": "fa:16:3e:1d:11:bf", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd37fa2fb-69", "ovs_interfaceid": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.801399] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a3e6cac6-38eb-45f1-9ede-27fc81764855 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 14.904s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 623.875875] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c75f44-ec95-4816-be73-0155495fc4f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.886537] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456f73af-f0a5-4b56-aa71-8ddf0011e4f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.927656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31ba2c69-e3d1-4f70-9533-38c40b495b08 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.936802] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be203866-a48f-4b67-819e-e12342bb00a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.957616] env[69475]: DEBUG nova.compute.provider_tree [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.962231] env[69475]: INFO nova.compute.manager [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Took 22.09 seconds to build instance. [ 623.990969] env[69475]: DEBUG oslo_vmware.api [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Task: {'id': task-3507554, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.474331} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.990969] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 623.991159] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 623.991329] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 623.991502] env[69475]: INFO nova.compute.manager [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Took 1.16 seconds to destroy the instance on the hypervisor. [ 623.991715] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 623.991907] env[69475]: DEBUG nova.compute.manager [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 623.992012] env[69475]: DEBUG nova.network.neutron [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 624.052268] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507555, 'name': Rename_Task, 'duration_secs': 0.343589} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.052893] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 624.053396] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5325e82e-2c82-41af-a2d0-975ac50ce6ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.060247] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 624.060247] env[69475]: value = "task-3507557" [ 624.060247] env[69475]: _type = "Task" [ 624.060247] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.071600] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507557, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.199468] env[69475]: DEBUG nova.compute.manager [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 624.199694] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.200181] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507556, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.201199] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d89a4d5-3b0f-4e50-8b67-49332ff95212 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.208977] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 624.209364] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-793a4850-2305-478d-8993-cf1d7d4795de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.216311] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 624.216311] env[69475]: value = "task-3507558" [ 624.216311] env[69475]: _type = "Task" [ 624.216311] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.224880] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.297381] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Releasing lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.297823] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Instance network_info: |[{"id": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "address": "fa:16:3e:1d:11:bf", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd37fa2fb-69", "ovs_interfaceid": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.298678] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1d:11:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd37fa2fb-69e3-46f1-a6a6-1794a902a86e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.307926] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Creating folder: Project (f2d589b9070842fdb8c16179eff3433f). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.308129] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77760ee3-45c4-450b-86be-a7a3a8518972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.318632] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.321838] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Created folder: Project (f2d589b9070842fdb8c16179eff3433f) in parent group-v700823. [ 624.321838] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Creating folder: Instances. Parent ref: group-v700857. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 624.321838] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec2433c0-e1bc-4a80-b75f-b5ff3d5fed22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.330861] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Created folder: Instances in parent group-v700857. [ 624.331172] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.331442] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.331594] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7e4d2382-1059-4f92-87ce-a9c6deefe15d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.361499] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.361499] env[69475]: value = "task-3507561" [ 624.361499] env[69475]: _type = "Task" [ 624.361499] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.372065] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507561, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.395468] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "c3db35f4-f43d-464c-9556-18a90866ee6a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.395649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.463552] env[69475]: DEBUG nova.scheduler.client.report [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 624.469112] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaabe977-c5fd-4f96-8376-7b8cb923a68d tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.606s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.573823] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 
tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507557, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.694239] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507556, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.732610] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507558, 'name': PowerOffVM_Task, 'duration_secs': 0.223188} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.732985] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 624.733252] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 624.733606] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b30b388e-53c6-49f4-9079-51e3cdec3e7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.745770] env[69475]: DEBUG nova.network.neutron [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updated VIF entry in instance network info cache for port 267ce176-9932-4001-a96f-4e89c511fca3. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 624.745874] env[69475]: DEBUG nova.network.neutron [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updating instance_info_cache with network_info: [{"id": "267ce176-9932-4001-a96f-4e89c511fca3", "address": "fa:16:3e:58:e7:62", "network": {"id": "b99233c7-e1eb-4d5b-ba20-9795ba71b047", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-227500057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b8dc36bed3a4bf5ae76fbc0c3c252a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ce176-99", "ovs_interfaceid": "267ce176-9932-4001-a96f-4e89c511fca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.801733] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 624.802197] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 624.802479] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Deleting the datastore file [datastore1] 48bc79bc-df56-4523-808f-a71b391062b9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 624.802559] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c3dc3f-a186-4de6-b7f0-26c90e9e2c15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.810236] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for the task: (returnval){ [ 624.810236] env[69475]: value = "task-3507563" [ 624.810236] env[69475]: _type = "Task" [ 624.810236] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.820402] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.874841] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507561, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.890543] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Successfully updated port: 242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 624.898838] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 624.968637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.942s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.971459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.585s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.972832] env[69475]: INFO nova.compute.claims [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 625.005687] env[69475]: INFO nova.scheduler.client.report [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Deleted allocations for instance ec7a6b3c-2a2f-4edd-8b79-ba55551d6159 [ 625.073361] env[69475]: DEBUG oslo_vmware.api [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3507557, 'name': PowerOnVM_Task, 'duration_secs': 0.630666} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.073668] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 625.073823] env[69475]: INFO nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Took 12.66 seconds to spawn the instance on the hypervisor. [ 625.073999] env[69475]: DEBUG nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 625.074858] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927b9cca-6ed6-43df-94cd-2eccbf4f5828 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.195457] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507556, 'name': ReconfigVM_Task, 'duration_secs': 1.138743} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.196753] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Reconfigured VM instance instance-0000000a to attach disk [datastore2] b255f4d7-b177-4d6c-8a28-dcb5a179c1c0/b255f4d7-b177-4d6c-8a28-dcb5a179c1c0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.197147] env[69475]: DEBUG nova.network.neutron [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.198196] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-293a7a5e-6a72-43c0-80d4-182bf6c057e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.209289] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 625.209289] env[69475]: value = "task-3507564" [ 625.209289] env[69475]: _type = "Task" [ 625.209289] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.222334] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507564, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.248847] env[69475]: DEBUG oslo_concurrency.lockutils [req-9db063fa-4e39-4fc7-b7dc-1b261b217166 req-46d2fc6e-2cdd-469e-a73b-dbc1e8be5dcd service nova] Releasing lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.324577] env[69475]: DEBUG oslo_vmware.api [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Task: {'id': task-3507563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195965} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.324577] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 625.324819] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 625.324819] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 625.325031] env[69475]: INFO nova.compute.manager [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 625.325246] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.325443] env[69475]: DEBUG nova.compute.manager [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 625.325538] env[69475]: DEBUG nova.network.neutron [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 625.359806] env[69475]: DEBUG nova.compute.manager [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Received event network-changed-56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 625.360085] env[69475]: DEBUG nova.compute.manager [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Refreshing instance network info cache due to event network-changed-56faa0c7-80a1-46f5-8167-4485e94846ea. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 625.360312] env[69475]: DEBUG oslo_concurrency.lockutils [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] Acquiring lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.360454] env[69475]: DEBUG oslo_concurrency.lockutils [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] Acquired lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.360623] env[69475]: DEBUG nova.network.neutron [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Refreshing network info cache for port 56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.380507] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507561, 'name': CreateVM_Task, 'duration_secs': 0.551107} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.380507] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.380507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.380507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.380507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.380507] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-391fa74a-790f-47ac-aa15-87945c7b5f08 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.387675] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 625.387675] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524363b8-4809-6391-a7a7-325cec438789" [ 625.387675] env[69475]: _type = "Task" [ 625.387675] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.388580] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.388710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.388882] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 625.399184] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524363b8-4809-6391-a7a7-325cec438789, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.427648] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.516906] env[69475]: DEBUG oslo_concurrency.lockutils [None req-39d426b6-7000-42d4-9015-a69867bce2d1 tempest-ImagesNegativeTestJSON-100383407 tempest-ImagesNegativeTestJSON-100383407-project-member] Lock "ec7a6b3c-2a2f-4edd-8b79-ba55551d6159" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.358s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 625.596187] env[69475]: INFO nova.compute.manager [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Took 22.22 seconds to build instance. [ 625.702562] env[69475]: INFO nova.compute.manager [-] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Took 1.71 seconds to deallocate network for instance. [ 625.719342] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507564, 'name': Rename_Task, 'duration_secs': 0.193304} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.720115] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 625.720115] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df5b4774-4886-449d-8a87-417176125319 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.726847] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 625.726847] env[69475]: value = "task-3507565" [ 625.726847] env[69475]: _type = "Task" [ 625.726847] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.738869] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507565, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.902654] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524363b8-4809-6391-a7a7-325cec438789, 'name': SearchDatastore_Task, 'duration_secs': 0.013843} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.903662] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.904388] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.904388] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.904388] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.904388] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.905162] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cecf1d9e-0874-448c-9f3d-2292e517fac5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.915408] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 625.915408] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 625.915703] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a36de550-dec4-406c-ab89-d227633a0fab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.921966] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 625.921966] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5290d1aa-5102-70bf-5dee-42be0cdb634d" [ 625.921966] env[69475]: _type = "Task" [ 625.921966] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.930982] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5290d1aa-5102-70bf-5dee-42be0cdb634d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.967077] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 625.986662] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.987323] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.080332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.080570] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.098366] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d5c23d9-6ebd-4ffa-b5bf-9cd250a28c76 tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.751s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.213397] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.239793] env[69475]: DEBUG nova.network.neutron [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.241531] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507565, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.293985] env[69475]: DEBUG nova.network.neutron [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Updating instance_info_cache with network_info: [{"id": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "address": "fa:16:3e:f5:f3:ec", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap242cecca-1c", "ovs_interfaceid": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.369174] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aba6d3-e83c-4f1a-858e-fda7790541c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.380793] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-bc4960c4-474c-4f73-9451-cab9491f329a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.419266] env[69475]: DEBUG nova.network.neutron [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updated VIF entry in instance network info cache for port 56faa0c7-80a1-46f5-8167-4485e94846ea. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.419934] env[69475]: DEBUG nova.network.neutron [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updating instance_info_cache with network_info: [{"id": "56faa0c7-80a1-46f5-8167-4485e94846ea", "address": "fa:16:3e:60:39:c6", "network": {"id": "57d3fe81-db66-46d5-ba0f-eec2a4cfd7cb", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1825674224-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d1d7ffc7ecd34b918b1998fdc97c3425", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "da623279-b6f6-4570-8b15-a332120b8b60", "external-id": "nsx-vlan-transportzone-733", "segmentation_id": 733, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56faa0c7-80", "ovs_interfaceid": "56faa0c7-80a1-46f5-8167-4485e94846ea", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.422444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43f63eac-aad0-439e-b26c-9e70c32b66aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.438441] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5290d1aa-5102-70bf-5dee-42be0cdb634d, 'name': SearchDatastore_Task, 'duration_secs': 0.023865} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.440288] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0f68ac-ff16-41c1-9f88-b88e87d7bcb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.446280] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34334d1a-6031-487a-9015-4024694be8d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.460294] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 626.460294] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b12360-a8a8-5d3d-f3bb-91955c6fa9fa" [ 626.460294] env[69475]: _type = "Task" [ 626.460294] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.461271] env[69475]: DEBUG nova.compute.provider_tree [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 626.471881] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b12360-a8a8-5d3d-f3bb-91955c6fa9fa, 'name': SearchDatastore_Task, 'duration_secs': 0.012674} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.472145] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.472418] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] af5dc581-cf6a-4b84-8bcf-96606ae07cc1/af5dc581-cf6a-4b84-8bcf-96606ae07cc1.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.472685] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a486fec2-f6d9-4b2b-b341-d6803fb0183d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.481232] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 626.481232] env[69475]: value = "task-3507566" [ 626.481232] env[69475]: _type = "Task" [ 626.481232] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.490228] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507566, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.490591] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 626.602511] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 626.740865] env[69475]: DEBUG oslo_vmware.api [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507565, 'name': PowerOnVM_Task, 'duration_secs': 0.531683} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.743102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 626.743102] env[69475]: INFO nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Took 11.89 seconds to spawn the instance on the hypervisor. [ 626.743102] env[69475]: DEBUG nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 626.743102] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bcf6cf-af43-4f4b-b8e5-e9b4cbd8a79e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.748090] env[69475]: INFO nova.compute.manager [-] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Took 1.42 seconds to deallocate network for instance. [ 626.796744] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.797141] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance network_info: |[{"id": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "address": "fa:16:3e:f5:f3:ec", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap242cecca-1c", "ovs_interfaceid": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 626.797924] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad 
tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:f3:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '242cecca-1cdb-42f1-92c0-0717cd78b7eb', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 626.806086] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating folder: Project (1073981d0d7740e78805798e02ff9d55). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.806539] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-164970b4-1433-445b-b11b-63ff24771b34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.817472] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created folder: Project (1073981d0d7740e78805798e02ff9d55) in parent group-v700823. [ 626.817684] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating folder: Instances. Parent ref: group-v700860. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 626.818100] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a468edb6-93b9-49b9-8bb4-4f2676cd6a6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.830092] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created folder: Instances in parent group-v700860. [ 626.830092] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 626.830092] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 626.830092] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-849d5f3d-510e-4d2e-8d25-c00331858c32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.848502] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 626.848502] env[69475]: value = "task-3507569" [ 626.848502] env[69475]: _type = "Task" [ 626.848502] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.856605] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507569, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.926203] env[69475]: DEBUG oslo_concurrency.lockutils [req-8537e26d-a6c5-4978-bffd-5e746a79762e req-60e96a18-3af9-467a-b9e2-e1a12c7323bc service nova] Releasing lock "refresh_cache-7be48799-ea4a-4e7f-95c2-637460596cfc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.967366] env[69475]: DEBUG nova.scheduler.client.report [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 626.995475] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507566, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.018418] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.131895] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.258091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.271226] env[69475]: INFO nova.compute.manager [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Took 18.84 seconds to build instance. [ 627.360585] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507569, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.475403] env[69475]: DEBUG nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Received event network-vif-plugged-d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.475670] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Acquiring lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.475837] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.476012] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.476870] env[69475]: DEBUG nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] No waiting events found dispatching network-vif-plugged-d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 627.477281] env[69475]: WARNING nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Received unexpected event network-vif-plugged-d37fa2fb-69e3-46f1-a6a6-1794a902a86e for instance with vm_state building and task_state spawning. [ 627.477281] env[69475]: DEBUG nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Received event network-changed-d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.477487] env[69475]: DEBUG nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Refreshing instance network info cache due to event network-changed-d37fa2fb-69e3-46f1-a6a6-1794a902a86e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 627.477739] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Acquiring lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.477955] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Acquired lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.478167] env[69475]: DEBUG nova.network.neutron [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Refreshing network info cache for port d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.480054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.509s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.480840] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 627.484075] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.814s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.489034] env[69475]: INFO nova.compute.claims [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 627.503963] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.504161] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.504623] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507566, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.877376} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.506075] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] af5dc581-cf6a-4b84-8bcf-96606ae07cc1/af5dc581-cf6a-4b84-8bcf-96606ae07cc1.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.506075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.506075] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca1c4f35-9436-4969-8baa-40746138a592 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.516462] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 627.516462] env[69475]: value = "task-3507570" [ 627.516462] env[69475]: _type = "Task" [ 627.516462] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.526254] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507570, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.775115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-239f1605-d3df-4b14-b703-7fb29daee9b8 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.354s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.867457] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507569, 'name': CreateVM_Task, 'duration_secs': 0.561145} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.867457] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 627.868360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.868547] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.868991] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 627.869324] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e00b4900-0d99-4d5e-b025-24c5243ac007 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.875605] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 627.875605] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526b3d37-49ab-1594-23ba-b6c89bca8522" [ 627.875605] env[69475]: _type = "Task" [ 627.875605] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.895610] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526b3d37-49ab-1594-23ba-b6c89bca8522, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.946689] env[69475]: DEBUG nova.compute.manager [None req-f4632e4c-ab74-46ca-b193-f4b27d567b24 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 627.946829] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09425ba9-d74f-4817-8457-bd414a43c833 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.991917] env[69475]: DEBUG nova.compute.utils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 627.994040] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 627.994209] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 628.028033] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507570, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065021} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.028169] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.028946] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d1ae39-f309-47fd-bbf7-1b2510700655 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.059125] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Reconfiguring VM instance instance-0000000c to attach disk [datastore2] af5dc581-cf6a-4b84-8bcf-96606ae07cc1/af5dc581-cf6a-4b84-8bcf-96606ae07cc1.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.061817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f00d33a5-5f33-49dd-8af0-8c835a7771e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.082725] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 628.082725] env[69475]: value = "task-3507571" [ 628.082725] env[69475]: _type = "Task" [ 628.082725] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.091706] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507571, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.113676] env[69475]: DEBUG nova.policy [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31f6c0770b9a43a8b2257e0ea615c271', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '41196d44cf0c4102bf4cf95a3eaf2f89', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 628.283128] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 628.389330] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526b3d37-49ab-1594-23ba-b6c89bca8522, 'name': SearchDatastore_Task, 'duration_secs': 0.031196} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.389642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.389904] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 628.390237] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.390466] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.390701] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 628.391062] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13d3ab79-fc43-4796-93f8-a2b51abe9449 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.403862] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 628.404079] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 628.404813] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa3579ad-03eb-4ab8-a862-7c1acd05dfc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.410560] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 628.410560] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ddcd2-3bbc-b509-181f-6c61c281caf3" [ 628.410560] env[69475]: _type = "Task" [ 628.410560] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.421630] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ddcd2-3bbc-b509-181f-6c61c281caf3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.458703] env[69475]: INFO nova.compute.manager [None req-f4632e4c-ab74-46ca-b193-f4b27d567b24 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] instance snapshotting [ 628.459981] env[69475]: DEBUG nova.objects.instance [None req-f4632e4c-ab74-46ca-b193-f4b27d567b24 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lazy-loading 'flavor' on Instance uuid 67287947-ecce-4462-8268-23bcc7421766 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 628.497285] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 628.594738] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507571, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.623554] env[69475]: DEBUG nova.network.neutron [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Updated VIF entry in instance network info cache for port d37fa2fb-69e3-46f1-a6a6-1794a902a86e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.623901] env[69475]: DEBUG nova.network.neutron [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Updating instance_info_cache with network_info: [{"id": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "address": "fa:16:3e:1d:11:bf", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd37fa2fb-69", "ovs_interfaceid": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.818477] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.867713] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91510425-33b4-47a7-a132-3196df2d3249 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.876113] env[69475]: DEBUG nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Received event network-vif-plugged-242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.876469] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Acquiring lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.877937] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.878055] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 
req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.878641] env[69475]: DEBUG nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] No waiting events found dispatching network-vif-plugged-242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.878641] env[69475]: WARNING nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Received unexpected event network-vif-plugged-242cecca-1cdb-42f1-92c0-0717cd78b7eb for instance with vm_state building and task_state spawning. [ 628.878796] env[69475]: DEBUG nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Received event network-changed-242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.878923] env[69475]: DEBUG nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Refreshing instance network info cache due to event network-changed-242cecca-1cdb-42f1-92c0-0717cd78b7eb. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.879217] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Acquiring lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.879376] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Acquired lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 628.879591] env[69475]: DEBUG nova.network.neutron [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Refreshing network info cache for port 242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 628.887600] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f79fc6-68ef-49db-b92e-eac74baefae0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.928921] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad22213-ba57-41db-9029-caa93d596e46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.938697] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': 
session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ddcd2-3bbc-b509-181f-6c61c281caf3, 'name': SearchDatastore_Task, 'duration_secs': 0.029399} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.943410] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad5a0a45-8171-4b3b-92b0-085cdaf43a87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.947025] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11484b54-a879-4cb1-8a70-9d8a7f2fe0d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.958015] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 628.958015] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52317e9f-771b-da1c-ab31-a3a00a9d42df" [ 628.958015] env[69475]: _type = "Task" [ 628.958015] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.968965] env[69475]: DEBUG nova.compute.provider_tree [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.973964] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c2eb32-b5fa-41ef-9cc0-8e285976204b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.983997] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52317e9f-771b-da1c-ab31-a3a00a9d42df, 'name': SearchDatastore_Task, 'duration_secs': 0.011533} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.996143] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.996143] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 628.996680] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-804c52d7-4846-4e19-90a4-d35dc78d54f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.999391] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f401aa1c-3280-41c4-b948-b2f157e361c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.013891] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 629.013891] env[69475]: value = "task-3507572" [ 629.013891] env[69475]: _type = "Task" [ 629.013891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.017207] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Successfully created port: b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 629.025616] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507572, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.102537] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507571, 'name': ReconfigVM_Task, 'duration_secs': 0.825728} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.102876] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Reconfigured VM instance instance-0000000c to attach disk [datastore2] af5dc581-cf6a-4b84-8bcf-96606ae07cc1/af5dc581-cf6a-4b84-8bcf-96606ae07cc1.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 629.103724] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32f4eb8b-e01a-45cf-88f7-ea79edd4e91a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.111316] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 629.111316] env[69475]: value = "task-3507573" [ 629.111316] env[69475]: _type = "Task" [ 629.111316] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.125553] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507573, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.126104] env[69475]: DEBUG oslo_concurrency.lockutils [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] Releasing lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.126341] env[69475]: DEBUG nova.compute.manager [req-5fab3b49-e848-4990-8575-30fdbf8bafa5 req-0dd4e487-e560-428d-ace6-1737d6574f2f service nova] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Received event network-vif-deleted-9cdc8cda-340c-4ebc-884a-d52746c1cda6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 629.455831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "67287947-ecce-4462-8268-23bcc7421766" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.456133] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.456353] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock 
"67287947-ecce-4462-8268-23bcc7421766-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.456626] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.456821] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.459088] env[69475]: INFO nova.compute.manager [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Terminating instance [ 629.478871] env[69475]: DEBUG nova.scheduler.client.report [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 629.508368] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 629.518086] env[69475]: DEBUG nova.compute.manager [None req-f4632e4c-ab74-46ca-b193-f4b27d567b24 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance disappeared during snapshot {{(pid=69475) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 629.532332] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507572, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.542750] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 629.543014] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 629.543180] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 629.543493] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 629.543493] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 629.543634] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 629.543836] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 629.543990] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 629.544320] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 629.544320] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 629.544495] env[69475]: DEBUG nova.virt.hardware [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 629.545690] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e2cf8c-e371-4942-a425-bf1d8743e717 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.562312] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7666e25-587b-4171-a085-b178486c0f48 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.622218] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507573, 'name': Rename_Task, 'duration_secs': 0.16794} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.622218] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.622218] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83221ed6-43ef-4996-bd99-f8baccd056c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.628079] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 629.628079] env[69475]: value = "task-3507574" [ 629.628079] env[69475]: _type = "Task" [ 629.628079] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.703791] env[69475]: DEBUG nova.compute.manager [None req-f4632e4c-ab74-46ca-b193-f4b27d567b24 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Found 0 images (rotation: 2) {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 629.792514] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.792763] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.825768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "c078753c-48a6-490b-8d7d-b0832eced25e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 629.826045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "c078753c-48a6-490b-8d7d-b0832eced25e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.964313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "refresh_cache-67287947-ecce-4462-8268-23bcc7421766" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.964313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquired lock "refresh_cache-67287947-ecce-4462-8268-23bcc7421766" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.964313] env[69475]: DEBUG nova.network.neutron [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 629.989151] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.505s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 629.991303] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 629.992492] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.058s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 629.993939] env[69475]: INFO nova.compute.claims [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 630.031118] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507572, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615874} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.031618] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 630.031864] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 630.032178] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-39de28df-56c2-48e5-9446-7cb580d5faed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.040492] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 630.040492] env[69475]: value = "task-3507575" [ 630.040492] env[69475]: _type = "Task" [ 630.040492] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.054928] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507575, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.138810] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507574, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.234865] env[69475]: DEBUG nova.network.neutron [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Updated VIF entry in instance network info cache for port 242cecca-1cdb-42f1-92c0-0717cd78b7eb. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.237074] env[69475]: DEBUG nova.network.neutron [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Updating instance_info_cache with network_info: [{"id": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "address": "fa:16:3e:f5:f3:ec", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap242cecca-1c", "ovs_interfaceid": "242cecca-1cdb-42f1-92c0-0717cd78b7eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.504149] env[69475]: DEBUG nova.compute.utils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 630.508750] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 630.508750] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 630.524022] env[69475]: DEBUG nova.network.neutron [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 630.555696] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189711} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.556122] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 630.556916] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404d8993-0b12-48ea-bf19-d14615c48af8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.587468] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Reconfiguring VM instance instance-0000000d to attach disk [datastore2] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 630.590404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46d2ffec-b23b-4dea-a9ce-185a723a62c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.610341] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 630.610341] env[69475]: value = "task-3507576" [ 630.610341] env[69475]: _type = "Task" [ 630.610341] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.618874] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507576, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.639967] env[69475]: DEBUG oslo_vmware.api [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507574, 'name': PowerOnVM_Task, 'duration_secs': 0.94283} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.640370] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 630.640666] env[69475]: INFO nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Took 9.93 seconds to spawn the instance on the hypervisor. [ 630.640870] env[69475]: DEBUG nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 630.641601] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2045980-7559-42f5-9888-1ee71ee5a0a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.678834] env[69475]: DEBUG nova.network.neutron [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.699686] env[69475]: DEBUG nova.policy [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34ec94b3705a455a8bd13f54927167ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b14737c5edf94580b711ca21258a8811', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 630.741214] env[69475]: DEBUG oslo_concurrency.lockutils [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] Releasing lock "refresh_cache-7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.741487] env[69475]: DEBUG nova.compute.manager [req-378a57c6-2e44-4843-b3d4-2b902ea20f71 req-40709fef-7b0a-49a0-9d53-6ee9c3dd7489 service nova] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Received event network-vif-deleted-17befd20-3223-44d2-87ad-fed58da5076e {{(pid=69475) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.016018] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 631.124189] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507576, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.161154] env[69475]: INFO nova.compute.manager [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Took 20.48 seconds to build instance. [ 631.181535] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Releasing lock "refresh_cache-67287947-ecce-4462-8268-23bcc7421766" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 631.181757] env[69475]: DEBUG nova.compute.manager [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 631.181996] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 631.183122] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89b2c7dc-aa68-47f0-9176-f86453cb0c36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.198698] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 631.199675] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6e6df9a-6518-4e41-9ecd-bdd7993acc0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.210195] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: (returnval){ [ 631.210195] env[69475]: value = "task-3507577" [ 631.210195] env[69475]: _type = "Task" [ 631.210195] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.221269] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.270422] env[69475]: DEBUG nova.compute.manager [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Received event network-changed-806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 631.270615] env[69475]: DEBUG nova.compute.manager [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Refreshing instance network info cache due to event network-changed-806e8096-632b-4993-a27c-3eb4767e9d00. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 631.270805] env[69475]: DEBUG oslo_concurrency.lockutils [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] Acquiring lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.272512] env[69475]: DEBUG oslo_concurrency.lockutils [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] Acquired lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 631.272512] env[69475]: DEBUG nova.network.neutron [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Refreshing network info cache for port 806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 631.540142] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700f8d95-47a7-4962-aaff-2576b193f467 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.550810] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34460aa-b017-4c0e-a8ca-ad33298f6c9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.587954] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd84b05-d719-411d-976a-d1b86a301dbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.601591] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Successfully created port: 29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 631.607530] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8840cb4c-cf0c-443e-9337-00cebddd42d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.630931] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507576, 'name': ReconfigVM_Task, 'duration_secs': 0.722969} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.631452] env[69475]: DEBUG nova.compute.provider_tree [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 631.632826] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Reconfigured VM instance instance-0000000d to attach disk [datastore2] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 631.637892] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d26d422-2009-483f-8b1e-4190a5de2197 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.643014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "8f65d893-d2e2-452f-8870-f72ec036f16a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.645414] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.649148] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 631.649148] env[69475]: value = "task-3507578" [ 631.649148] env[69475]: _type = "Task" [ 631.649148] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.661920] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507578, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.666368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0898bc28-2835-40f4-845c-96b8b77e554e tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.996s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.729658] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507577, 'name': PowerOffVM_Task, 'duration_secs': 0.191165} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.729658] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 631.734235] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 631.734235] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ddc2fc42-b1b7-47ed-8df4-a74e93efbbdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.762876] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 631.763049] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 631.763117] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Deleting the datastore file [datastore2] 67287947-ecce-4462-8268-23bcc7421766 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 631.763685] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a53255f4-baa0-4ca9-b080-2e2f34e9eb3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.770825] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for the task: 
(returnval){ [ 631.770825] env[69475]: value = "task-3507580" [ 631.770825] env[69475]: _type = "Task" [ 631.770825] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.780929] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Successfully updated port: b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 631.784827] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507580, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.029862] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 632.059232] env[69475]: DEBUG nova.network.neutron [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updated VIF entry in instance network info cache for port 806e8096-632b-4993-a27c-3eb4767e9d00. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 632.060024] env[69475]: DEBUG nova.network.neutron [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating instance_info_cache with network_info: [{"id": "806e8096-632b-4993-a27c-3eb4767e9d00", "address": "fa:16:3e:db:bb:e6", "network": {"id": "1d7c82a8-9307-4c3e-938e-03ec482c5ac1", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1922599493-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25e8170617f6470dbcf8c36752c83214", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap806e8096-63", "ovs_interfaceid": "806e8096-632b-4993-a27c-3eb4767e9d00", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.066948] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 
tempest-VolumesAdminNegativeTest-1855579597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 632.067188] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 632.067338] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 632.067511] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 632.067652] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 632.067793] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 632.068089] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 632.068254] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 632.068418] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Got 1 possible topologies {{(pid=69475) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 632.068574] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 632.068742] env[69475]: DEBUG nova.virt.hardware [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 632.069621] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcfb6c6c-51dc-4cb4-b977-b8902ab1bc76 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.079022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e1ae44-834a-458c-a500-2cd1a5c982e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.141099] env[69475]: DEBUG nova.scheduler.client.report [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 632.162393] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507578, 'name': Rename_Task, 'duration_secs': 0.213743} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.163093] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 632.163394] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-beba5d71-b649-4ab0-a99c-73a96555dcb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.168043] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 632.172255] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 632.172255] env[69475]: value = "task-3507581" [ 632.172255] env[69475]: _type = "Task" [ 632.172255] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.181414] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507581, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.211606] env[69475]: DEBUG nova.compute.manager [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Received event network-vif-plugged-b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.211863] env[69475]: DEBUG oslo_concurrency.lockutils [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] Acquiring lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.212119] env[69475]: DEBUG oslo_concurrency.lockutils [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.212324] env[69475]: DEBUG oslo_concurrency.lockutils [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.212507] env[69475]: DEBUG nova.compute.manager [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] No waiting events found dispatching network-vif-plugged-b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 632.212723] env[69475]: WARNING nova.compute.manager [req-f6660fad-b373-48a2-adb7-2f2b59f53aae req-f97632b8-4e5d-4c1f-a761-a209802753fd service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Received unexpected event network-vif-plugged-b9c9e750-9412-44e9-9898-efc2a703f86b for instance with vm_state building and task_state spawning. [ 632.283963] env[69475]: DEBUG oslo_vmware.api [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Task: {'id': task-3507580, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299799} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.284365] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 632.284655] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 632.284944] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 632.285239] env[69475]: INFO nova.compute.manager [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] [instance: 67287947-ecce-4462-8268-23bcc7421766] Took 1.10 seconds to destroy the instance on the hypervisor. [ 632.285595] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 632.286158] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.286291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquired lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.286430] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.289277] env[69475]: DEBUG nova.compute.manager [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 632.289440] env[69475]: DEBUG nova.network.neutron [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 632.306254] env[69475]: DEBUG nova.network.neutron [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 632.474236] env[69475]: DEBUG nova.compute.manager [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Received event network-changed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 632.474481] env[69475]: DEBUG nova.compute.manager [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Refreshing instance network info cache due to event network-changed. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 632.474645] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] Acquiring lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.474781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] Acquired lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 632.474930] env[69475]: DEBUG nova.network.neutron [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 632.563101] env[69475]: DEBUG oslo_concurrency.lockutils [req-336abc43-0663-486a-829e-0d71ef563ae2 req-23bc19f2-7d0e-4860-877f-11dedb8f246a service nova] Releasing lock "refresh_cache-9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 632.647189] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.648335] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.649892] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.331s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.651319] env[69475]: INFO nova.compute.claims [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.687894] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507581, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.698028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.809246] env[69475]: DEBUG nova.network.neutron [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.823123] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 633.072071] env[69475]: DEBUG nova.network.neutron [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Updating instance_info_cache with network_info: [{"id": "b9c9e750-9412-44e9-9898-efc2a703f86b", "address": "fa:16:3e:e1:e8:af", "network": {"id": "553296bc-f68a-4461-a87f-0f707c2ead05", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-267014741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41196d44cf0c4102bf4cf95a3eaf2f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9875d38f-76e2-416c-bfb7-f18a22b0d8ee", "external-id": "nsx-vlan-transportzone-442", "segmentation_id": 442, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c9e750-94", "ovs_interfaceid": "b9c9e750-9412-44e9-9898-efc2a703f86b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.162331] env[69475]: DEBUG nova.compute.utils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.164433] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.164736] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.192070] env[69475]: DEBUG oslo_vmware.api [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507581, 'name': PowerOnVM_Task, 'duration_secs': 1.012821} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.192388] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 633.192596] env[69475]: INFO nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Took 10.11 seconds to spawn the instance on the hypervisor. [ 633.192776] env[69475]: DEBUG nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 633.193603] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ce9f7b-c448-4b4f-9d50-958fe73ff3a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.249299] env[69475]: DEBUG nova.policy [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32d8efff6f9e4846b49febaf379f07fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1784f9c01de49c494bc44e0272c02cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.311819] env[69475]: INFO nova.compute.manager [-] [instance: 67287947-ecce-4462-8268-23bcc7421766] Took 1.02 seconds to deallocate network for instance. 
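The Rename_Task, PowerOnVM_Task, ExtendVirtualDisk_Task and ReconfigVM_Task entries above all trace the same oslo.vmware pattern: a vSphere task method is invoked, its task reference comes back in the "Waiting for the task: (returnval){ ... }" block, and the task is then polled ("progress is N%") until it reports completion. The sketch below only illustrates that pattern in isolation; it is not code from this deployment, and VC_HOST, VC_USER, VC_PASS and the 'vm-12345' moref are placeholder assumptions, not values taken from this log.

# Illustrative sketch of the oslo.vmware invoke-then-wait pattern seen in the
# Task records above; credentials and the moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'VC_HOST', 'VC_USER', 'VC_PASS',
    api_retry_count=10,       # retry transient API faults
    task_poll_interval=0.5)   # seconds between "progress is N%" polls

# Build a managed object reference for the VM and start a vSphere task.
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# wait_for_task() polls the task until vCenter reports success (raising on an
# error state) and returns the final TaskInfo, which corresponds to the
# "Task: {'id': ..., 'name': PowerOnVM_Task, ...} completed successfully." records.
task_info = session.wait_for_task(task_ref)
print(task_info.state)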
[ 633.323660] env[69475]: DEBUG nova.network.neutron [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Updating instance_info_cache with network_info: [{"id": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "address": "fa:16:3e:1d:11:bf", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd37fa2fb-69", "ovs_interfaceid": "d37fa2fb-69e3-46f1-a6a6-1794a902a86e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.353989] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.354300] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.355054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.355054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 633.355054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 
tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 633.357265] env[69475]: INFO nova.compute.manager [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Terminating instance [ 633.479457] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Successfully updated port: 29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 633.576017] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Releasing lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.576369] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Instance network_info: |[{"id": "b9c9e750-9412-44e9-9898-efc2a703f86b", "address": "fa:16:3e:e1:e8:af", "network": {"id": "553296bc-f68a-4461-a87f-0f707c2ead05", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-267014741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41196d44cf0c4102bf4cf95a3eaf2f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9875d38f-76e2-416c-bfb7-f18a22b0d8ee", "external-id": "nsx-vlan-transportzone-442", "segmentation_id": 442, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c9e750-94", "ovs_interfaceid": "b9c9e750-9412-44e9-9898-efc2a703f86b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 633.576781] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e1:e8:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9875d38f-76e2-416c-bfb7-f18a22b0d8ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9c9e750-9412-44e9-9898-efc2a703f86b', 'vif_model': 'vmxnet3'}] 
{{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 633.585717] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Creating folder: Project (41196d44cf0c4102bf4cf95a3eaf2f89). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.586101] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aca5d90a-b398-41d9-8183-8839e11290e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.599230] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Created folder: Project (41196d44cf0c4102bf4cf95a3eaf2f89) in parent group-v700823. [ 633.599230] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Creating folder: Instances. Parent ref: group-v700863. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 633.599230] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a2c7d60e-fa9d-406b-9f3c-dddfb5edc730 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.608610] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Created folder: Instances in parent group-v700863. [ 633.608610] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.608753] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 633.608847] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9840cd4a-811c-4947-82e5-148eef320946 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.635335] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 633.635335] env[69475]: value = "task-3507584" [ 633.635335] env[69475]: _type = "Task" [ 633.635335] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.643274] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507584, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.676211] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.727905] env[69475]: INFO nova.compute.manager [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Took 20.71 seconds to build instance. [ 633.792546] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Successfully created port: 9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.819728] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 633.826571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5e0a0cac-550c-4aac-b3b7-8ff0077a7f58 tempest-ServerExternalEventsTest-31444780 tempest-ServerExternalEventsTest-31444780-project] Releasing lock "refresh_cache-af5dc581-cf6a-4b84-8bcf-96606ae07cc1" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 633.863926] env[69475]: DEBUG nova.compute.manager [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 633.863926] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.867367] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2789d2e7-b8ac-4010-afa2-804ace83ea7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.877021] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.877021] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fa3a9332-4cda-476f-8b16-99bc67523376 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.885292] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 633.885292] env[69475]: value = "task-3507585" [ 633.885292] env[69475]: _type = "Task" [ 633.885292] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.894041] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507585, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.981259] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 633.982340] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 633.982340] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 634.084419] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e16508-2904-4228-8ab7-cf75eccdf8bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.093649] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a1bf68-2087-481d-b139-9b837dc624e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.135754] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48fa4d5-758d-473e-84ed-51a8ef8b8307 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.153027] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d5561f-5589-4bd8-99c4-b1360bb915c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.157162] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507584, 'name': CreateVM_Task, 'duration_secs': 0.500772} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.157352] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 634.158482] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.158643] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.158954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 634.159224] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78784d36-7cb2-4d6a-b4d1-c53004e75c38 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.169909] env[69475]: DEBUG nova.compute.provider_tree [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.176284] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 634.176284] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52174360-ba23-6ca3-2c84-f76d7fca2dc5" [ 634.176284] env[69475]: _type = "Task" [ 634.176284] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.182910] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52174360-ba23-6ca3-2c84-f76d7fca2dc5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.232265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7d813b5-1ced-41ae-85d0-ef9d445099ad tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.226s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 634.235327] env[69475]: DEBUG nova.compute.manager [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.238411] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2729ddc6-17f1-44a7-9fc3-2a7125c3ccb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.252561] env[69475]: DEBUG nova.compute.manager [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Received event network-changed-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.252787] env[69475]: DEBUG nova.compute.manager [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Refreshing instance network info cache due to event network-changed-73a9904f-d8b5-4a55-8338-3f26cce4f9f7. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 634.253058] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Acquiring lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.253248] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Acquired lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.253688] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Refreshing network info cache for port 73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.399567] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507585, 'name': PowerOffVM_Task, 'duration_secs': 0.219663} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.400632] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 634.400632] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 634.400632] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a391e51-ea7c-4e50-965d-7172ae6b8580 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.459404] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 634.459614] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 634.459794] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Deleting the datastore file [datastore2] af5dc581-cf6a-4b84-8bcf-96606ae07cc1 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 634.460280] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9d88e03a-0922-436b-9b2e-27d33aca7cdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.470040] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for the task: (returnval){ [ 634.470040] env[69475]: value = "task-3507587" [ 634.470040] env[69475]: _type = "Task" [ 634.470040] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.478901] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507587, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.569056] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 634.674086] env[69475]: DEBUG nova.scheduler.client.report [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.690172] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.692439] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52174360-ba23-6ca3-2c84-f76d7fca2dc5, 'name': SearchDatastore_Task, 'duration_secs': 0.011999} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.694731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 634.694731] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 634.694731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.694731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.694872] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 634.694872] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1acfc74d-0365-41c3-bd59-deb849edfb24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.706201] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 634.706363] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 634.707494] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12f8a1b2-0b5b-4dd5-b1aa-505cb1050858 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.718132] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 634.718132] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52342b88-d6b1-9f01-37dd-eb36768e3242" [ 634.718132] env[69475]: _type = "Task" [ 634.718132] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 634.731438] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 634.731438] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 634.731438] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 634.731438] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 634.731729] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 634.731729] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:444}} [ 634.731729] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 634.731729] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 634.731729] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 634.732018] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 634.732018] env[69475]: DEBUG nova.virt.hardware [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 634.732018] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0c0499a-5935-4436-a676-39298a0d9035 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.737213] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 634.740980] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52342b88-d6b1-9f01-37dd-eb36768e3242, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.748741] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca6d1de-810e-4c33-b3e9-27ad41deb4fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.764271] env[69475]: INFO nova.compute.manager [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] instance snapshotting [ 634.772022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae6e1827-2bf8-427d-a372-768e40464c1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.796251] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a647e9-dabc-4c46-ae4a-d69c34e1ea9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.833792] env[69475]: DEBUG nova.compute.manager [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Received event network-changed-b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.833991] env[69475]: DEBUG nova.compute.manager [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Refreshing instance network info cache due to event network-changed-b9c9e750-9412-44e9-9898-efc2a703f86b. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 634.834282] env[69475]: DEBUG oslo_concurrency.lockutils [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] Acquiring lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 634.834377] env[69475]: DEBUG oslo_concurrency.lockutils [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] Acquired lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 634.834947] env[69475]: DEBUG nova.network.neutron [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Refreshing network info cache for port b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 634.982937] env[69475]: DEBUG oslo_vmware.api [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Task: {'id': task-3507587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139526} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.983392] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.983639] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.983813] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.983983] env[69475]: INFO nova.compute.manager [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Took 1.12 seconds to destroy the instance on the hypervisor. [ 634.984245] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.984741] env[69475]: DEBUG nova.compute.manager [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.984841] env[69475]: DEBUG nova.network.neutron [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 635.180400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.180923] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.183613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.756s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.184977] env[69475]: INFO nova.compute.claims [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.230149] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52342b88-d6b1-9f01-37dd-eb36768e3242, 'name': SearchDatastore_Task, 'duration_secs': 0.030814} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.230931] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60f29a50-5cd6-4491-85e7-51b7d1a706ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.236685] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 635.236685] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ed5f-0a41-8887-f98c-40421d1a5d8b" [ 635.236685] env[69475]: _type = "Task" [ 635.236685] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.246265] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ed5f-0a41-8887-f98c-40421d1a5d8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.266501] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.308779] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 635.309116] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d4778b36-82be-4070-a91d-f0cdd47bc92b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.317299] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 635.317299] env[69475]: value = "task-3507588" [ 635.317299] env[69475]: _type = "Task" [ 635.317299] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.327100] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507588, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.351644] env[69475]: DEBUG nova.network.neutron [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updating instance_info_cache with network_info: [{"id": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "address": "fa:16:3e:f1:c1:a3", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b2e26b-ed", "ovs_interfaceid": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.552637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "3149cd80-503c-42e4-ac91-54aababe84e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.552848] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.690687] env[69475]: DEBUG nova.compute.utils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.694212] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.694393] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 635.712047] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updated VIF entry in instance network info cache for port 73a9904f-d8b5-4a55-8338-3f26cce4f9f7. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 635.713311] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updating instance_info_cache with network_info: [{"id": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "address": "fa:16:3e:f3:5c:c6", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap73a9904f-d8", "ovs_interfaceid": "73a9904f-d8b5-4a55-8338-3f26cce4f9f7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.753855] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ed5f-0a41-8887-f98c-40421d1a5d8b, 'name': SearchDatastore_Task, 'duration_secs': 0.01067} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 635.754287] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.754525] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a22a4d65-56eb-4313-bd0e-81148981f5b8/a22a4d65-56eb-4313-bd0e-81148981f5b8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 635.754868] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51a2e573-dea3-4a2d-a6c0-0c25bb506066 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.762922] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 635.762922] env[69475]: value = "task-3507589" [ 635.762922] env[69475]: _type = "Task" [ 635.762922] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.776700] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.829172] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507588, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 635.858022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 635.858022] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Instance network_info: |[{"id": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "address": "fa:16:3e:f1:c1:a3", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b2e26b-ed", "ovs_interfaceid": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 635.858389] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:c1:a3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '29b2e26b-edae-4c53-98e5-15ce643aa4d0', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 635.866093] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Creating folder: Project (b14737c5edf94580b711ca21258a8811). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 635.867844] env[69475]: DEBUG nova.policy [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32d8efff6f9e4846b49febaf379f07fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1784f9c01de49c494bc44e0272c02cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 635.869502] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8acaf099-3882-4f8c-bc6e-b1857f8d94ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.881675] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Created folder: Project (b14737c5edf94580b711ca21258a8811) in parent group-v700823. [ 635.881982] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Creating folder: Instances. Parent ref: group-v700866. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 635.882323] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-886d645a-9e96-43fd-b8ef-e6eb7d2fe9e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.890926] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Created folder: Instances in parent group-v700866. [ 635.891187] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 635.891379] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 635.891597] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4920f43a-a210-4e46-bee2-5eeb6a10cf71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.912775] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 635.912775] env[69475]: value = "task-3507592" [ 635.912775] env[69475]: _type = "Task" [ 635.912775] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 635.922066] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507592, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.121501] env[69475]: DEBUG nova.network.neutron [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Updated VIF entry in instance network info cache for port b9c9e750-9412-44e9-9898-efc2a703f86b. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 636.121944] env[69475]: DEBUG nova.network.neutron [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Updating instance_info_cache with network_info: [{"id": "b9c9e750-9412-44e9-9898-efc2a703f86b", "address": "fa:16:3e:e1:e8:af", "network": {"id": "553296bc-f68a-4461-a87f-0f707c2ead05", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-267014741-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "41196d44cf0c4102bf4cf95a3eaf2f89", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9875d38f-76e2-416c-bfb7-f18a22b0d8ee", "external-id": "nsx-vlan-transportzone-442", "segmentation_id": 442, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9c9e750-94", "ovs_interfaceid": "b9c9e750-9412-44e9-9898-efc2a703f86b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.194663] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.216427] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Releasing lock "refresh_cache-25c44ae0-4193-4833-85ec-ebc0ef3cf593" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.219023] env[69475]: DEBUG nova.compute.manager [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Received event network-changed-267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.219023] env[69475]: DEBUG nova.compute.manager [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Refreshing instance network info cache due to event network-changed-267ce176-9932-4001-a96f-4e89c511fca3. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 636.219023] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Acquiring lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.219023] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Acquired lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.219023] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Refreshing network info cache for port 267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.282687] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469834} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.283583] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a22a4d65-56eb-4313-bd0e-81148981f5b8/a22a4d65-56eb-4313-bd0e-81148981f5b8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 636.283845] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 636.284278] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8b1dd1f-bde6-42ab-a822-0fef27ed2271 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.295018] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 636.295018] env[69475]: value = "task-3507593" [ 636.295018] env[69475]: _type = "Task" [ 636.295018] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.305730] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507593, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.332025] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507588, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.427702] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507592, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.488862] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Successfully updated port: 9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 636.583260] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b42e5e-22f1-47f3-9d3f-de2ff05bb010 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.591649] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a1e5d0-8c4b-49f8-bf75-da07b1679991 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.625243] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5cd18c1-5591-472f-999b-4da6eebc62b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.628675] env[69475]: DEBUG oslo_concurrency.lockutils [req-d36d793b-2a91-4dea-b8d8-d26163b10f79 req-0c0fbf6a-7aa3-4447-8e75-a594f48589e0 service nova] Releasing lock "refresh_cache-a22a4d65-56eb-4313-bd0e-81148981f5b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 636.635060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8805c16-b73d-44f8-8ea8-cd1e637d01d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.650659] env[69475]: DEBUG nova.compute.provider_tree [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.713021] env[69475]: DEBUG nova.network.neutron [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 636.805625] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061532} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.805931] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 636.806779] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2948a7f-6ec2-4ca3-b14d-89166be6f235 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.831384] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] a22a4d65-56eb-4313-bd0e-81148981f5b8/a22a4d65-56eb-4313-bd0e-81148981f5b8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 636.834976] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5912e60b-526e-4e7c-a650-74f19b24765f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.856493] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507588, 'name': CreateSnapshot_Task, 'duration_secs': 1.150766} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.857715] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 636.858041] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 636.858041] env[69475]: value = "task-3507594" [ 636.858041] env[69475]: _type = "Task" [ 636.858041] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.858725] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923a5ccb-44ef-4c82-9edc-915e4e3880d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.875790] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507594, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.925691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "b87cac84-ea70-428b-872e-4f6145e36b39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.925691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.936073] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507592, 'name': CreateVM_Task, 'duration_secs': 0.587043} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 636.936073] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 636.936073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.936073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.936073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 636.936073] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2db61704-5125-45f9-ad9a-5c5b9ce3ca70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.941399] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 636.941399] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520dfb4d-a45f-1dbb-36a4-09c3281ced16" [ 636.941399] env[69475]: _type = "Task" [ 636.941399] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 636.952409] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520dfb4d-a45f-1dbb-36a4-09c3281ced16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 636.995562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.995735] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.995938] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 637.067257] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Successfully created port: 65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 637.153950] env[69475]: DEBUG nova.scheduler.client.report [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.168813] env[69475]: DEBUG nova.compute.manager [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Received event network-vif-plugged-29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.169315] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.170606] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Lock "93607154-f135-4925-9c3a-a97051535b00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.170606] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Lock "93607154-f135-4925-9c3a-a97051535b00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.170606] env[69475]: DEBUG nova.compute.manager [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] No waiting events found dispatching network-vif-plugged-29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 637.170769] env[69475]: WARNING nova.compute.manager [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Received unexpected event network-vif-plugged-29b2e26b-edae-4c53-98e5-15ce643aa4d0 for instance with vm_state building and task_state spawning. [ 637.170880] env[69475]: DEBUG nova.compute.manager [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Received event network-changed-29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.171066] env[69475]: DEBUG nova.compute.manager [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Refreshing instance network info cache due to event network-changed-29b2e26b-edae-4c53-98e5-15ce643aa4d0. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 637.173736] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Acquiring lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.173736] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Acquired lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.173736] env[69475]: DEBUG nova.network.neutron [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Refreshing network info cache for port 29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 637.209237] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.212575] env[69475]: INFO nova.compute.manager [-] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Took 2.23 seconds to deallocate network for instance. [ 637.236944] env[69475]: INFO nova.compute.manager [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Rebuilding instance [ 637.248756] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.249393] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 637.249585] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:366}} [ 637.251110] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 637.251110] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 637.251110] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 637.251110] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 637.251110] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 637.251432] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 637.251432] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 637.251432] env[69475]: DEBUG nova.virt.hardware [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 637.254438] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ee375e-cf30-41d6-a6e0-a08e6fa68bf5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.266226] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94b6f7d-41a5-4755-8392-5a4935ae02d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.318975] env[69475]: DEBUG nova.compute.manager [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Checking state 
{{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 637.320081] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa2a8fc-a47a-4e27-a2a0-6cf3dc6d2b86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.372220] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507594, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.379596] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 637.380294] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-91672cdb-0026-43d7-bd86-ddca2e727eb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.390744] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 637.390744] env[69475]: value = "task-3507595" [ 637.390744] env[69475]: _type = "Task" [ 637.390744] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.399599] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507595, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.455314] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520dfb4d-a45f-1dbb-36a4-09c3281ced16, 'name': SearchDatastore_Task, 'duration_secs': 0.027135} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.455433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.456121] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 637.456121] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.456121] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.456288] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 637.458032] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2d55fa2-4d08-4880-8d5b-3683fbc1c33c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.478032] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 637.478363] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 637.479220] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41e44cca-bbf5-40ed-9fda-2c3636b0f235 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.487574] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 637.487574] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525827d3-4eb2-a7ab-c07e-7cbb8874d518" [ 637.487574] env[69475]: _type = "Task" [ 637.487574] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.505151] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525827d3-4eb2-a7ab-c07e-7cbb8874d518, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.585684] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.628955] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updated VIF entry in instance network info cache for port 267ce176-9932-4001-a96f-4e89c511fca3. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.629353] env[69475]: DEBUG nova.network.neutron [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updating instance_info_cache with network_info: [{"id": "267ce176-9932-4001-a96f-4e89c511fca3", "address": "fa:16:3e:58:e7:62", "network": {"id": "b99233c7-e1eb-4d5b-ba20-9795ba71b047", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-227500057-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.146", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2b8dc36bed3a4bf5ae76fbc0c3c252a9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap267ce176-99", "ovs_interfaceid": "267ce176-9932-4001-a96f-4e89c511fca3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.659260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.659857] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 637.663597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.453s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.663819] env[69475]: DEBUG nova.objects.instance [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lazy-loading 'resources' on Instance uuid e1ecc905-22da-434a-8ddf-a66f88ab47fb {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 637.719768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.836586] env[69475]: DEBUG nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Received event network-vif-plugged-9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.836586] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Acquiring lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 637.836586] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.836586] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.836586] env[69475]: DEBUG nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] No waiting events found dispatching network-vif-plugged-9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 637.836763] env[69475]: WARNING nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Received unexpected event network-vif-plugged-9e51856c-7355-448c-82fc-e5af23bb0fcf for instance with vm_state building and task_state 
spawning. [ 637.836763] env[69475]: DEBUG nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Received event network-vif-deleted-d37fa2fb-69e3-46f1-a6a6-1794a902a86e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.836763] env[69475]: DEBUG nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Received event network-changed-9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 637.836763] env[69475]: DEBUG nova.compute.manager [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Refreshing instance network info cache due to event network-changed-9e51856c-7355-448c-82fc-e5af23bb0fcf. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 637.836763] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Acquiring lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.872963] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507594, 'name': ReconfigVM_Task, 'duration_secs': 0.907636} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.873344] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Reconfigured VM instance instance-0000000e to attach disk [datastore1] a22a4d65-56eb-4313-bd0e-81148981f5b8/a22a4d65-56eb-4313-bd0e-81148981f5b8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 637.873951] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9d1fcc8-bf14-440d-badf-d85818acb3fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.880891] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 637.880891] env[69475]: value = "task-3507596" [ 637.880891] env[69475]: _type = "Task" [ 637.880891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.893353] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507596, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.901854] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507595, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.998485] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525827d3-4eb2-a7ab-c07e-7cbb8874d518, 'name': SearchDatastore_Task, 'duration_secs': 0.027316} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 637.999497] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-262dad23-c60b-4dc6-8bf0-677fbda27290 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.005673] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 638.005673] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52508e54-0b8b-32de-a8aa-61402c6fd618" [ 638.005673] env[69475]: _type = "Task" [ 638.005673] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.020216] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52508e54-0b8b-32de-a8aa-61402c6fd618, 'name': SearchDatastore_Task, 'duration_secs': 0.010014} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.020468] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.020742] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 93607154-f135-4925-9c3a-a97051535b00/93607154-f135-4925-9c3a-a97051535b00.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 638.020970] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-18d44e05-fbfb-4ce1-8cf0-c29f75a5117e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.035103] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 638.035103] env[69475]: value = "task-3507597" [ 638.035103] env[69475]: _type = "Task" [ 638.035103] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.048709] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507597, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.131855] env[69475]: DEBUG oslo_concurrency.lockutils [req-e9a73878-ec44-4b2a-a279-85a8b6d294d6 req-61c087e0-1340-4b1c-a9a4-d56a511946f8 service nova] Releasing lock "refresh_cache-b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.168213] env[69475]: DEBUG nova.compute.utils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 638.174431] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 638.174546] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 638.254881] env[69475]: DEBUG nova.network.neutron [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Updating instance_info_cache with network_info: [{"id": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "address": "fa:16:3e:f0:13:f6", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e51856c-73", "ovs_interfaceid": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.341291] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 638.341851] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc16206e-21c3-4190-b4c2-854a900bcee9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.350138] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 638.350138] env[69475]: value = "task-3507598" [ 638.350138] env[69475]: _type = "Task" [ 638.350138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.360677] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507598, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.403932] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507596, 'name': Rename_Task, 'duration_secs': 0.213218} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.409985] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 638.409985] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507595, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.415499] env[69475]: DEBUG nova.policy [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '70142cccbe764ad792c910a7e7b29584', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1f9ff44ddd0f4b2393e659ba2bd2cfa6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 638.421236] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bb26272-d0a7-4e60-99a7-47d318738102 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.430848] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 638.430848] env[69475]: value = "task-3507599" [ 638.430848] env[69475]: _type = "Task" [ 638.430848] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.444189] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507599, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.550911] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507597, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506184} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.550911] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 93607154-f135-4925-9c3a-a97051535b00/93607154-f135-4925-9c3a-a97051535b00.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 638.550911] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 638.551191] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1a422fbc-3dcb-4609-834f-69536dfd53ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.561808] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 638.561808] env[69475]: value = "task-3507600" [ 638.561808] env[69475]: _type = "Task" [ 638.561808] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.572492] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507600, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.673780] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 638.698169] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc80f29f-3dae-4282-9031-7eb05463dd13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.710544] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb41889-97a9-4742-86de-d12bb9d3661c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.718287] env[69475]: DEBUG nova.network.neutron [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updated VIF entry in instance network info cache for port 29b2e26b-edae-4c53-98e5-15ce643aa4d0. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.718683] env[69475]: DEBUG nova.network.neutron [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updating instance_info_cache with network_info: [{"id": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "address": "fa:16:3e:f1:c1:a3", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b2e26b-ed", "ovs_interfaceid": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.753708] env[69475]: DEBUG oslo_concurrency.lockutils [req-b938f805-df27-4f49-88d9-3155b58c7031 req-8a219d32-a2ad-4e94-a69f-9399c70d3589 service nova] Releasing lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.754689] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9711de-f268-463a-ac6b-582543fba720 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.758247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.758521] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance network_info: |[{"id": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "address": "fa:16:3e:f0:13:f6", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, 
"type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e51856c-73", "ovs_interfaceid": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 638.761983] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Acquired lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.761983] env[69475]: DEBUG nova.network.neutron [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Refreshing network info cache for port 9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 638.761983] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:13:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e51856c-7355-448c-82fc-e5af23bb0fcf', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 638.769112] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating folder: Project (f1784f9c01de49c494bc44e0272c02cf). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.773811] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b92121f0-144c-4268-8ed9-b380d7135e65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.776042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6730df83-184e-4430-b17f-2c3f60a41015 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.794356] env[69475]: DEBUG nova.compute.provider_tree [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 638.837482] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created folder: Project (f1784f9c01de49c494bc44e0272c02cf) in parent group-v700823. 
[ 638.837482] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating folder: Instances. Parent ref: group-v700871. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 638.838422] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45816f73-fbdf-4b18-a272-2e5bc17e8315 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.847507] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created folder: Instances in parent group-v700871. [ 638.847772] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 638.847970] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 638.848220] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-986fa0cd-d1d3-4c60-8905-7833cd1c0261 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.880444] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507598, 'name': PowerOffVM_Task, 'duration_secs': 0.204977} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.880683] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 638.880683] env[69475]: value = "task-3507603" [ 638.880683] env[69475]: _type = "Task" [ 638.880683] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.880921] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 638.881192] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.882044] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ec338b-754c-4ea8-989b-e7d1d0f05756 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.893508] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 638.896768] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90b170a6-fbe4-46ab-ab01-f410103e80fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.908672] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507595, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.944771] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507599, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.973864] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 638.974094] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 638.974273] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore2] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 638.974569] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-392c47a6-feaf-4654-b756-ca120d677a07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.981141] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 638.981141] env[69475]: value = "task-3507605" [ 638.981141] env[69475]: _type = "Task" [ 638.981141] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.992048] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507605, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.075697] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507600, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06422} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.076043] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 639.076851] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad40064-16df-4063-a601-c3f7eb945e9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.107839] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 93607154-f135-4925-9c3a-a97051535b00/93607154-f135-4925-9c3a-a97051535b00.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 639.107839] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f53e5b8a-f807-4db8-afd9-57a040450adb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.128404] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 639.128404] env[69475]: value = "task-3507606" [ 639.128404] env[69475]: _type = "Task" [ 639.128404] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.138483] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507606, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.298165] env[69475]: DEBUG nova.scheduler.client.report [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 639.396459] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507603, 'name': CreateVM_Task, 'duration_secs': 0.498421} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.396459] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 639.397244] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.398520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.398520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 639.399616] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c2df4d76-df28-4e17-9e20-69e891142350 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.408655] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 639.408655] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd108d-8a22-1386-0c88-3fd348bac307" [ 639.408655] env[69475]: _type = "Task" [ 639.408655] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.412412] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507595, 'name': CloneVM_Task, 'duration_secs': 1.628132} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.416316] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Created linked-clone VM from snapshot [ 639.416472] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-701410df-aa09-4408-8803-ca150344ca4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.423765] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd108d-8a22-1386-0c88-3fd348bac307, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.429641] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Uploading image 63189104-4113-4184-b6e5-45573f7f5cf2 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 639.441724] env[69475]: DEBUG oslo_vmware.api [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507599, 'name': PowerOnVM_Task, 'duration_secs': 0.532965} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.442039] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 639.442292] env[69475]: INFO nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Took 9.93 seconds to spawn the instance on the hypervisor. 
[ 639.442437] env[69475]: DEBUG nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 639.443206] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-960e7aa7-5929-49b0-9197-23b6ced3fba9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.446545] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 639.446766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-313230e6-515e-487d-92fd-741b9e04025d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.456568] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 639.456568] env[69475]: value = "task-3507607" [ 639.456568] env[69475]: _type = "Task" [ 639.456568] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.465305] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507607, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.490972] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308805} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.494352] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 639.494537] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 639.495384] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 639.641543] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507606, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.684579] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 639.723317] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.724104] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 639.724104] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 639.724104] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 639.724104] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 639.725033] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 639.725033] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 639.725175] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 639.725262] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 639.725437] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 639.725672] env[69475]: DEBUG nova.virt.hardware [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 639.726688] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c97a836-ab5c-41ee-aac2-d009fa3e77a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.737634] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-684e44a8-db49-4e6f-be45-f94fa887d189 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.775054] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Successfully created port: 00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 639.806925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 639.811619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.793s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 639.813130] env[69475]: INFO nova.compute.claims [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 639.853193] env[69475]: INFO nova.scheduler.client.report [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Deleted allocations for instance e1ecc905-22da-434a-8ddf-a66f88ab47fb [ 639.935956] env[69475]: DEBUG oslo_vmware.api [None 
req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd108d-8a22-1386-0c88-3fd348bac307, 'name': SearchDatastore_Task, 'duration_secs': 0.036527} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.935956] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.935956] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 639.935956] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.936378] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.936378] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 639.936378] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98bc7e7e-9db4-45ec-ab68-99353317a7f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.949916] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 639.949916] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 639.949916] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbc543bb-36ee-43a1-a5dd-5ec8f8257595 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.957131] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 639.957131] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521f8527-5b8c-b53a-f332-2c8f7561e8a3" [ 639.957131] env[69475]: _type = "Task" [ 639.957131] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.980383] env[69475]: INFO nova.compute.manager [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Took 22.61 seconds to build instance. [ 639.988862] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521f8527-5b8c-b53a-f332-2c8f7561e8a3, 'name': SearchDatastore_Task, 'duration_secs': 0.008214} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.999239] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507607, 'name': Destroy_Task, 'duration_secs': 0.453217} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.006362] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d6da3f8-a366-4480-b177-004322654720 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.011189] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Destroyed the VM [ 640.011701] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 640.013216] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-578805c1-afa3-4d1a-a560-b3467f6add5b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.020033] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 640.020033] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f39a5c-e80c-93e5-9657-bc764de27973" [ 640.020033] env[69475]: _type = "Task" [ 640.020033] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.021071] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 640.021071] env[69475]: value = "task-3507608" [ 640.021071] env[69475]: _type = "Task" [ 640.021071] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.037327] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f39a5c-e80c-93e5-9657-bc764de27973, 'name': SearchDatastore_Task, 'duration_secs': 0.012478} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.042136] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.042511] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 640.042854] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.043128] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e59c8cc5-3d27-45ad-ac99-39f62bab7652 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.049687] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 640.049687] env[69475]: value = "task-3507609" [ 640.049687] env[69475]: _type = "Task" [ 640.049687] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.063537] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507609, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.121792] env[69475]: DEBUG nova.network.neutron [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Updated VIF entry in instance network info cache for port 9e51856c-7355-448c-82fc-e5af23bb0fcf. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 640.122152] env[69475]: DEBUG nova.network.neutron [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Updating instance_info_cache with network_info: [{"id": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "address": "fa:16:3e:f0:13:f6", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e51856c-73", "ovs_interfaceid": "9e51856c-7355-448c-82fc-e5af23bb0fcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.140540] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507606, 'name': ReconfigVM_Task, 'duration_secs': 0.569417} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.140792] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 93607154-f135-4925-9c3a-a97051535b00/93607154-f135-4925-9c3a-a97051535b00.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 640.141645] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2575993e-d192-4b2f-a76d-529a809c9117 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.148593] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 640.148593] env[69475]: value = "task-3507610" [ 640.148593] env[69475]: _type = "Task" [ 640.148593] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.159491] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507610, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.364534] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7ced3f6d-46cd-40f9-adb1-9d9c8934ed24 tempest-ServerDiagnosticsTest-878497450 tempest-ServerDiagnosticsTest-878497450-project-member] Lock "e1ecc905-22da-434a-8ddf-a66f88ab47fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.037s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.484671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5b78456e-9c70-494c-bb69-25d153d2b21a tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.128s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 640.538420] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507608, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.550224] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 640.550224] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 640.550224] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 640.550396] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 640.550396] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 
tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 640.550396] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 640.550396] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 640.550396] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 640.550900] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 640.551472] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 640.554620] env[69475]: DEBUG nova.virt.hardware [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 640.554620] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f74a98-50f6-4e07-9bfc-5ee5203e2553 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.570729] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4437e38a-abdf-4667-b035-ae5d68b35644 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.574972] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507609, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.492722} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.575438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 640.576855] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 640.576855] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a59f1f78-dc40-4d25-aa56-3df5ac64acc0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.587490] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:f3:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '242cecca-1cdb-42f1-92c0-0717cd78b7eb', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 640.595616] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 640.596459] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 640.596732] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51bbed41-d5d5-4fd8-8aa9-1b8b16627cd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.615284] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 640.615284] env[69475]: value = "task-3507611" [ 640.615284] env[69475]: _type = "Task" [ 640.615284] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.622030] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 640.622030] env[69475]: value = "task-3507612" [ 640.622030] env[69475]: _type = "Task" [ 640.622030] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.628599] env[69475]: DEBUG oslo_concurrency.lockutils [req-5650c347-816e-41e5-ad83-b1627b4817bb req-cba3cc22-6c5a-4aef-98e3-3223af05a253 service nova] Releasing lock "refresh_cache-235653ac-a893-4f42-a394-dd81f61f0d73" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.630380] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507611, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.634180] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507612, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.660619] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507610, 'name': Rename_Task, 'duration_secs': 0.435084} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.661204] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 640.661204] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd824698-8a5d-468b-9b5d-719c7319c747 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.668033] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 640.668033] env[69475]: value = "task-3507613" [ 640.668033] env[69475]: _type = "Task" [ 640.668033] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.678606] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507613, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.991561] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 641.047299] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507608, 'name': RemoveSnapshot_Task} progress is 98%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.076584] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Successfully updated port: 65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 641.136175] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507611, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078073} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.147628] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.150560] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507612, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.150560] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf51249b-eec6-4e89-a29e-c7907064cfdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.183299] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.189921] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc33524d-40c0-4fb7-8a1a-b26c7fec0c18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.224131] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507613, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.225839] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 641.225839] env[69475]: value = "task-3507614" [ 641.225839] env[69475]: _type = "Task" [ 641.225839] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.238716] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507614, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.362044] env[69475]: DEBUG nova.compute.manager [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Received event network-vif-plugged-65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 641.362193] env[69475]: DEBUG oslo_concurrency.lockutils [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] Acquiring lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.362372] env[69475]: DEBUG oslo_concurrency.lockutils [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.362530] env[69475]: DEBUG oslo_concurrency.lockutils [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.365111] env[69475]: DEBUG nova.compute.manager [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] No waiting events found dispatching network-vif-plugged-65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 641.365111] env[69475]: WARNING nova.compute.manager [req-1f3a6963-d743-4c41-80ea-8f13f5497ae3 req-1b77840e-39fb-4baf-958c-c4f20e1b0b24 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Received unexpected event network-vif-plugged-65a50486-30b1-4098-94d5-abba26c7c25b for instance with vm_state building and task_state spawning. 
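(Editor's note: the surrounding DEBUG lines repeat the same poll-until-done pattern: a vCenter task is submitted ("Waiting for the task ... to complete"), its progress is polled ("progress is 0%" ... "progress is 99%"), and it finishes with a "completed successfully" record carrying 'duration_secs'. The snippet below is a minimal, self-contained Python sketch of that pattern only; get_task_info() and TaskInfo are hypothetical stand-ins for the vSphere task lookup, not the oslo.vmware API or Nova's implementation.)

    # Minimal sketch of the task-polling loop the log traces repeatedly.
    # NOTE: get_task_info() and TaskInfo are hypothetical placeholders,
    # not the oslo.vmware API.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str        # "running", "success" or "error"
        progress: int     # 0-100
        error: str = ""

    def get_task_info(task_ref):
        """Placeholder for a real vSphere task-info lookup."""
        raise NotImplementedError

    def wait_for_task(task_ref, poll_interval=0.5, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out."""
        start = time.monotonic()
        while True:
            info = get_task_info(task_ref)
            if info.state == "success":
                # analogous to the "completed successfully ... duration_secs" records
                return time.monotonic() - start
            if info.state == "error":
                raise RuntimeError(f"task {task_ref} failed: {info.error}")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"task {task_ref} did not finish in {timeout}s")
            # analogous to the "... progress is N%" DEBUG records
            print(f"Task {task_ref} progress is {info.progress}%")
            time.sleep(poll_interval)

(End of note; the log resumes below.)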
[ 641.425703] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1f7676-3423-4dc1-8170-9bafb6b1d3e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.434301] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3656f50a-6bb7-4ba3-9814-94fd2c439bed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.469281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9e7a47-97ba-4c9a-90e5-20bdbd24d392 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.479682] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e11b669-081d-4379-9f50-3073bb4ee2a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.497057] env[69475]: DEBUG nova.compute.provider_tree [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.524530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 641.536506] env[69475]: DEBUG oslo_vmware.api [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507608, 'name': RemoveSnapshot_Task, 'duration_secs': 1.172328} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.536770] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 641.580762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.580909] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.581101] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 641.640253] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507612, 'name': CreateVM_Task, 'duration_secs': 0.623134} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.640253] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 641.640927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 641.641062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 641.641431] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 641.642030] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8313b07-4056-4754-baa8-95eb1c304748 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.647256] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 641.647256] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525517c6-d078-d92c-0359-a2e10ed497b6" [ 641.647256] env[69475]: _type = "Task" [ 641.647256] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.657577] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525517c6-d078-d92c-0359-a2e10ed497b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.689443] env[69475]: DEBUG oslo_vmware.api [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507613, 'name': PowerOnVM_Task, 'duration_secs': 0.640892} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.689602] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 641.689753] env[69475]: INFO nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Took 9.66 seconds to spawn the instance on the hypervisor. [ 641.690238] env[69475]: DEBUG nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 641.690815] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1304da2e-d8a2-4c87-9ebe-be38a5f53d26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.740750] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507614, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.003889] env[69475]: DEBUG nova.scheduler.client.report [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 642.045775] env[69475]: WARNING nova.compute.manager [None req-973de125-c05d-426b-aa04-3bf82d1a545c tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Image not found during snapshot: nova.exception.ImageNotFound: Image 63189104-4113-4184-b6e5-45573f7f5cf2 could not be found. [ 642.158265] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525517c6-d078-d92c-0359-a2e10ed497b6, 'name': SearchDatastore_Task, 'duration_secs': 0.025646} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.158662] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 642.159384] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 642.159384] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.159384] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 642.159384] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 
tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 642.160590] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-753e8bbd-c802-4e0f-a4c3-4a042b742c08 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.168715] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 642.169043] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 642.169664] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e396773e-e6c8-4efb-8abd-4a506b3f852f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.174991] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 642.174991] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527be04e-8dec-3a87-d5ba-d32783129c26" [ 642.174991] env[69475]: _type = "Task" [ 642.174991] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.183124] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527be04e-8dec-3a87-d5ba-d32783129c26, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.183937] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 642.217950] env[69475]: INFO nova.compute.manager [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Took 20.58 seconds to build instance. [ 642.240284] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507614, 'name': ReconfigVM_Task, 'duration_secs': 0.591753} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.241033] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.241352] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53423f3d-b5b3-4961-9ad4-072cabaeb6a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.255562] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 642.255562] env[69475]: value = "task-3507615" [ 642.255562] env[69475]: _type = "Task" [ 642.255562] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.264534] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507615, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.510127] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.511011] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 642.513948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.382s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 642.515322] env[69475]: INFO nova.compute.claims [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.686575] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527be04e-8dec-3a87-d5ba-d32783129c26, 'name': SearchDatastore_Task, 'duration_secs': 0.012159} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.687559] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ee41ad-2a73-44a8-b5a0-e4ba6466f2fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.693822] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 642.693822] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae1009-9c35-22a3-324a-df8b6411220a" [ 642.693822] env[69475]: _type = "Task" [ 642.693822] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.705020] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae1009-9c35-22a3-324a-df8b6411220a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.722053] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7270e365-a6cd-4220-a038-98f3e247cf4d tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.092s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 642.765497] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507615, 'name': Rename_Task, 'duration_secs': 0.213842} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.765789] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.766117] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a39d32b-ba9f-45d7-adb6-f59481f9c931 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.773756] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 642.773756] env[69475]: value = "task-3507616" [ 642.773756] env[69475]: _type = "Task" [ 642.773756] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.784924] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507616, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.788175] env[69475]: DEBUG nova.network.neutron [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Updating instance_info_cache with network_info: [{"id": "65a50486-30b1-4098-94d5-abba26c7c25b", "address": "fa:16:3e:2f:72:b2", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65a50486-30", "ovs_interfaceid": "65a50486-30b1-4098-94d5-abba26c7c25b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.025117] env[69475]: DEBUG nova.compute.utils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 643.033736] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 
tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 643.033736] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 643.207305] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae1009-9c35-22a3-324a-df8b6411220a, 'name': SearchDatastore_Task, 'duration_secs': 0.037232} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.207588] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.207923] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 643.208267] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1db41148-ebc9-4546-bff4-bc6dbc526995 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.217915] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 643.217915] env[69475]: value = "task-3507617" [ 643.217915] env[69475]: _type = "Task" [ 643.217915] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.226993] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 643.232472] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507617, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.286686] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507616, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.288750] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 643.289269] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Instance network_info: |[{"id": "65a50486-30b1-4098-94d5-abba26c7c25b", "address": "fa:16:3e:2f:72:b2", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65a50486-30", "ovs_interfaceid": "65a50486-30b1-4098-94d5-abba26c7c25b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 643.289967] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:72:b2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65a50486-30b1-4098-94d5-abba26c7c25b', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.299356] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.300138] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.300772] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-05161152-aac5-4dd0-9639-1f575357a525 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.322593] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.322593] env[69475]: value = "task-3507618" [ 643.322593] env[69475]: _type = "Task" [ 643.322593] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.331547] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507618, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.449272] env[69475]: DEBUG nova.policy [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '411c7e7bcdee495697199023e10202fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47bcbe5bc3a14fbf9ea9617ea7d50342', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 643.534152] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 643.735375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.735375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 643.744969] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507617, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.762827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.787038] env[69475]: DEBUG oslo_vmware.api [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507616, 'name': PowerOnVM_Task, 'duration_secs': 0.805161} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.787038] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.787038] env[69475]: INFO nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Took 9.10 seconds to spawn the instance on the hypervisor. [ 643.787038] env[69475]: DEBUG nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.788161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b5139c-bdea-4da6-9950-67fa3738cb01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.841674] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507618, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.949815] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 643.950056] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.133698] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a9cd14-95d4-46b8-b938-350866135fc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.141993] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b45828-db89-480d-b043-435e25d9bd6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.175898] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dc973f-3bb8-4c71-a801-63a2d2ee1e3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.185439] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b11b007-7fa3-458a-a667-90fe5fdc60b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.204724] env[69475]: DEBUG nova.compute.provider_tree [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.232143] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507617, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59696} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.232543] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 644.232791] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 644.233126] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0dd96559-467e-486b-903e-1f9328b553b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.239780] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 644.239780] env[69475]: value = "task-3507619" [ 644.239780] env[69475]: _type = "Task" [ 644.239780] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.252140] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507619, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.308907] env[69475]: INFO nova.compute.manager [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Took 22.41 seconds to build instance. [ 644.336848] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507618, 'name': CreateVM_Task, 'duration_secs': 0.528736} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.337090] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.337782] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.338034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.338375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.338639] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5466a47c-8af5-4fa1-8820-e3fb97a29435 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.343836] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 644.343836] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266e0fd-4d9b-a3c8-0ccc-2f32efbb9fe5" [ 644.343836] env[69475]: _type = "Task" [ 644.343836] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.358020] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266e0fd-4d9b-a3c8-0ccc-2f32efbb9fe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.551402] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 644.574147] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Successfully updated port: 00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 644.593579] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 644.593945] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 644.594211] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 644.594449] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 644.595209] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 644.595645] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 644.595951] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:583}} [ 644.596220] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 644.596895] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 644.597125] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 644.597329] env[69475]: DEBUG nova.virt.hardware [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 644.600056] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fee709f-52ac-4a6e-a2b7-9948bcd52953 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.620674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb22d9e-10bf-4371-b4bc-05ebefbcfd4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.716297] env[69475]: DEBUG nova.scheduler.client.report [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 644.759573] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507619, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065844} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.761236] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 644.761236] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4250f686-edca-49c7-afd4-f486a268be46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.803941] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 644.804274] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b506c96-4616-49d8-b434-1813e69104f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.819921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-44edd409-b1c8-480d-bbe3-fc931a3bed22 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.941s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.827369] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 644.827369] env[69475]: value = "task-3507620" [ 644.827369] env[69475]: _type = "Task" [ 644.827369] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.835239] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507620, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.857665] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266e0fd-4d9b-a3c8-0ccc-2f32efbb9fe5, 'name': SearchDatastore_Task, 'duration_secs': 0.031211} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.858018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.858272] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.858551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.858667] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.858843] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.859114] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e37cf828-ac95-4373-91ff-22a529775e0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.873050] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.873254] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.873983] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25fa0607-c66d-40c7-8b5e-2ddf888cc1a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.880338] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 644.880338] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42398-ba38-ae2d-3833-3af22998abda" [ 644.880338] env[69475]: _type = "Task" [ 644.880338] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.891360] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42398-ba38-ae2d-3833-3af22998abda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.984836] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Successfully created port: 4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.082620] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.082842] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.083067] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 645.226024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 645.226596] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 
3c253a57-1c93-4e8d-aaa1-1331c0547d85] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 645.231673] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.974s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.236887] env[69475]: DEBUG nova.objects.instance [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lazy-loading 'resources' on Instance uuid 48bc79bc-df56-4523-808f-a71b391062b9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 645.323283] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.338944] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507620, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.395641] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42398-ba38-ae2d-3833-3af22998abda, 'name': SearchDatastore_Task, 'duration_secs': 0.02759} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.396876] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f1f5a46-a97e-4393-84fb-eed5416353e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.403477] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 645.403477] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b0c1f-6b37-dbcf-9cb1-97e62f5fa543" [ 645.403477] env[69475]: _type = "Task" [ 645.403477] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.411248] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b0c1f-6b37-dbcf-9cb1-97e62f5fa543, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.489070] env[69475]: DEBUG nova.compute.manager [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Received event network-changed-65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 645.489070] env[69475]: DEBUG nova.compute.manager [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Refreshing instance network info cache due to event network-changed-65a50486-30b1-4098-94d5-abba26c7c25b. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 645.489070] env[69475]: DEBUG oslo_concurrency.lockutils [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] Acquiring lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 645.489070] env[69475]: DEBUG oslo_concurrency.lockutils [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] Acquired lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 645.489070] env[69475]: DEBUG nova.network.neutron [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Refreshing network info cache for port 65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 645.651429] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 645.740557] env[69475]: DEBUG nova.compute.utils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 645.741466] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 645.741624] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 645.854060] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507620, 'name': ReconfigVM_Task, 'duration_secs': 0.705451} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.859185] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3/7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 645.859185] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-85c50de9-8580-4735-a307-0bf8dd88db8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.863851] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 645.863851] env[69475]: value = "task-3507621" [ 645.863851] env[69475]: _type = "Task" [ 645.863851] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.867691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.875799] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507621, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.918447] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b0c1f-6b37-dbcf-9cb1-97e62f5fa543, 'name': SearchDatastore_Task, 'duration_secs': 0.025056} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.918734] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.918982] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a21ec73a-2658-4fc6-9bc1-0e492385d59e/a21ec73a-2658-4fc6-9bc1-0e492385d59e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.919261] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db121286-2078-4ea1-b0d5-d892b744d4d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.928846] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 645.928846] env[69475]: value = "task-3507622" [ 645.928846] env[69475]: _type = "Task" [ 645.928846] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.940764] env[69475]: DEBUG nova.policy [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b45f347050704253802c1dd0c78fc09c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a21fd326b7d4424eba76e774925f5beb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 645.951410] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507622, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.250655] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 646.309224] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0541c5f2-07c3-4f88-ac9b-2e70397ceb0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.319452] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0274bec9-7cca-49e7-aae9-3fa3688c01b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.357273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.357530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.357763] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.357892] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.358101] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.360516] env[69475]: INFO nova.compute.manager [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Terminating instance [ 646.362588] env[69475]: DEBUG nova.network.neutron [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 
c3db35f4-f43d-464c-9556-18a90866ee6a] Updating instance_info_cache with network_info: [{"id": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "address": "fa:16:3e:14:ee:c4", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f2c4f8-70", "ovs_interfaceid": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 646.364232] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2f4b41-0490-42b7-a10b-68988e1ce6e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.382242] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507621, 'name': Rename_Task, 'duration_secs': 0.178448} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.382351] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 646.383615] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf44990-248c-446c-a75f-7ae551c74b7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.391754] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cbf403ea-8065-4b87-8b55-307d39480218 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.401668] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 646.401668] env[69475]: value = "task-3507623" [ 646.401668] env[69475]: _type = "Task" [ 646.401668] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.410551] env[69475]: DEBUG nova.compute.provider_tree [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.421536] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507623, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.445709] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507622, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.869093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 646.870177] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Instance network_info: |[{"id": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "address": "fa:16:3e:14:ee:c4", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f2c4f8-70", "ovs_interfaceid": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 646.870532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:14:ee:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '00f2c4f8-7075-42f6-94f1-c09274ea941d', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 646.882112] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 646.885479] env[69475]: DEBUG nova.compute.manager [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 646.885479] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 646.885999] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 646.888031] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e9cae0-7709-495f-844a-4777be3f523b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.894374] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66fd93f6-e0fc-47ed-aa19-1b1d44d99318 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.918790] env[69475]: DEBUG nova.scheduler.client.report [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.926340] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 646.933489] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb5433f3-bede-4f08-91d9-6c8c1973f86c {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.934309] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 646.934309] env[69475]: value = "task-3507624" [ 646.934309] env[69475]: _type = "Task" [ 646.934309] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.937658] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507623, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.949031] env[69475]: DEBUG oslo_vmware.api [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 646.949031] env[69475]: value = "task-3507625" [ 646.949031] env[69475]: _type = "Task" [ 646.949031] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.962284] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507624, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.962284] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507622, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573452} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.962284] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a21ec73a-2658-4fc6-9bc1-0e492385d59e/a21ec73a-2658-4fc6-9bc1-0e492385d59e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 646.962284] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 646.962284] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-201fce70-7539-4818-ac4a-169464ecde1a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.968416] env[69475]: DEBUG oslo_vmware.api [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507625, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.972936] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 646.972936] env[69475]: value = "task-3507626" [ 646.972936] env[69475]: _type = "Task" [ 646.972936] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.983966] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507626, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.265866] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 647.312634] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.312634] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 647.312634] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 647.312928] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 647.312928] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 
tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 647.312928] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 647.313038] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 647.313173] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 647.313342] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 647.313504] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 647.313755] env[69475]: DEBUG nova.virt.hardware [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 647.314591] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63768ae-54f6-4fa2-8139-7fda651eaa2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.324710] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ece6ae8-e897-4147-95fa-f5bd1560b4f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.426240] env[69475]: DEBUG oslo_vmware.api [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507623, 'name': PowerOnVM_Task, 'duration_secs': 0.602151} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.427200] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 647.427200] env[69475]: DEBUG nova.compute.manager [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 647.428511] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46184df6-b3f8-479b-b2ae-23cffaaef3ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.432834] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.201s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.440307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.624s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.442504] env[69475]: INFO nova.compute.claims [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 647.449365] env[69475]: DEBUG nova.network.neutron [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Updated VIF entry in instance network info cache for port 65a50486-30b1-4098-94d5-abba26c7c25b. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 647.449365] env[69475]: DEBUG nova.network.neutron [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Updating instance_info_cache with network_info: [{"id": "65a50486-30b1-4098-94d5-abba26c7c25b", "address": "fa:16:3e:2f:72:b2", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65a50486-30", "ovs_interfaceid": "65a50486-30b1-4098-94d5-abba26c7c25b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.470011] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507624, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.470504] env[69475]: DEBUG oslo_vmware.api [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507625, 'name': PowerOffVM_Task, 'duration_secs': 0.335189} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.471496] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 647.471625] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 647.472023] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d097fd3f-e741-451e-800c-5003fdd5483f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.475026] env[69475]: INFO nova.scheduler.client.report [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Deleted allocations for instance 48bc79bc-df56-4523-808f-a71b391062b9 [ 647.489440] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507626, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078187} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.489903] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 647.490740] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1f8b6b-f4bd-4bde-b6b0-53409624b9aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.516394] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] a21ec73a-2658-4fc6-9bc1-0e492385d59e/a21ec73a-2658-4fc6-9bc1-0e492385d59e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 647.517375] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa2059f5-34c1-46a9-af11-595f27665c3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.545520] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 647.545520] env[69475]: value = "task-3507628" [ 647.545520] env[69475]: _type = "Task" [ 647.545520] 
env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.545520] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 647.545520] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 647.545520] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleting the datastore file [datastore1] 91d5b0db-63a5-4290-af9b-264a5ce4cd95 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 647.551076] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-af19d094-021a-44a7-b6c3-7c281fe31266 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.561976] env[69475]: DEBUG oslo_vmware.api [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 647.561976] env[69475]: value = "task-3507629" [ 647.561976] env[69475]: _type = "Task" [ 647.561976] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.561976] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507628, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.654877] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.655126] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 647.896563] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Successfully created port: 70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.967270] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507624, 'name': CreateVM_Task, 'duration_secs': 0.555848} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.967705] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 647.968432] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.969049] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.969049] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 647.970663] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba2fad56-e146-47aa-9d06-7cd72acac06e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.976031] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.980570] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 647.980570] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f13947-1c26-95d5-82ef-00860d60122b" [ 647.980570] env[69475]: _type = "Task" [ 647.980570] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.995427] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f13947-1c26-95d5-82ef-00860d60122b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.995732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0ee53ac6-0856-491c-9a2a-862aa884b51e tempest-ServerDiagnosticsNegativeTest-1288083493 tempest-ServerDiagnosticsNegativeTest-1288083493-project-member] Lock "48bc79bc-df56-4523-808f-a71b391062b9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.316s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 647.998349] env[69475]: DEBUG oslo_concurrency.lockutils [req-04b45a78-dd4b-4cbf-9845-39bc5647a98a req-5b2991a2-3ed5-420b-9e51-c2dada1eafd0 service nova] Releasing lock "refresh_cache-a21ec73a-2658-4fc6-9bc1-0e492385d59e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.061420] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507628, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.071067] env[69475]: DEBUG oslo_vmware.api [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507629, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.464488} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.071345] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 648.071495] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 648.071657] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 648.072161] env[69475]: INFO nova.compute.manager [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Took 1.19 seconds to destroy the instance on the hypervisor. [ 648.072415] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 648.072622] env[69475]: DEBUG nova.compute.manager [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 648.072715] env[69475]: DEBUG nova.network.neutron [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 648.258352] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Successfully updated port: 4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 648.501617] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f13947-1c26-95d5-82ef-00860d60122b, 'name': SearchDatastore_Task, 'duration_secs': 0.015214} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.504657] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.504932] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 648.507240] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.507240] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.507240] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 648.507240] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b764f729-634e-4ec3-83b5-041b993b7563 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.519158] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 648.519158] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 648.520039] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-613dc755-5195-45e8-9902-54ab173feb09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.526350] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 648.526350] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5274eedb-cadf-4e79-b8ed-83f3e79ce010" [ 648.526350] env[69475]: _type = "Task" [ 648.526350] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.539726] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5274eedb-cadf-4e79-b8ed-83f3e79ce010, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.555357] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507628, 'name': ReconfigVM_Task, 'duration_secs': 0.785842} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.555779] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Reconfigured VM instance instance-00000011 to attach disk [datastore1] a21ec73a-2658-4fc6-9bc1-0e492385d59e/a21ec73a-2658-4fc6-9bc1-0e492385d59e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 648.556479] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ed231f70-eced-4d3b-a605-ce613f907359 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.567347] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 648.567347] env[69475]: value = "task-3507630" [ 648.567347] env[69475]: _type = "Task" [ 648.567347] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.575045] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507630, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.765465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.766852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.767072] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 648.979084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c7249b-d7be-4813-8ce6-02276d16fb57 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.990201] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca4d53c4-2a0f-442d-b437-d5d40297ee34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.021584] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050eaa9e-94e4-4985-99aa-fbaba4e7498c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.037085] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79ae02d0-fa4e-4296-beda-95dab1dcbea6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.046765] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5274eedb-cadf-4e79-b8ed-83f3e79ce010, 'name': SearchDatastore_Task, 'duration_secs': 0.022399} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.056560] env[69475]: DEBUG nova.compute.provider_tree [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 649.058687] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e6f047c-6759-4a69-8ae4-96b82cddefe0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.064881] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 649.064881] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52574276-8888-6471-a298-aa79f4f87f0f" [ 649.064881] env[69475]: _type = "Task" [ 649.064881] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.080379] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52574276-8888-6471-a298-aa79f4f87f0f, 'name': SearchDatastore_Task} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.081955] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.082521] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c3db35f4-f43d-464c-9556-18a90866ee6a/c3db35f4-f43d-464c-9556-18a90866ee6a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 649.082521] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507630, 'name': Rename_Task, 'duration_secs': 0.310632} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.082742] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bec18e90-7f1a-4910-b1cb-fab587c9eb3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.087058] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 649.087058] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fe6f66c6-b0c1-41ed-adce-2babaf6c11e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.092559] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 649.092559] env[69475]: value = "task-3507631" [ 649.092559] env[69475]: _type = "Task" [ 649.092559] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.095588] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 649.095588] env[69475]: value = "task-3507632" [ 649.095588] env[69475]: _type = "Task" [ 649.095588] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.113290] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507631, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.116949] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507632, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.145115] env[69475]: DEBUG nova.network.neutron [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.307832] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 649.353638] env[69475]: DEBUG nova.compute.manager [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Received event network-vif-plugged-00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.353881] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Acquiring lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.354185] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.354386] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.354550] env[69475]: DEBUG nova.compute.manager [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] No waiting events found dispatching network-vif-plugged-00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 649.354716] env[69475]: WARNING nova.compute.manager [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Received unexpected event network-vif-plugged-00f2c4f8-7075-42f6-94f1-c09274ea941d for instance with vm_state building and task_state spawning. [ 649.354896] env[69475]: DEBUG nova.compute.manager [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Received event network-changed-00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 649.355065] env[69475]: DEBUG nova.compute.manager [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Refreshing instance network info cache due to event network-changed-00f2c4f8-7075-42f6-94f1-c09274ea941d. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 649.355265] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Acquiring lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.355411] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Acquired lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.355558] env[69475]: DEBUG nova.network.neutron [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Refreshing network info cache for port 00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 649.471582] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.471848] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.498426] env[69475]: DEBUG nova.network.neutron [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updating instance_info_cache with network_info: [{"id": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "address": "fa:16:3e:50:f7:f7", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bac6b08-29", "ovs_interfaceid": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 649.562451] env[69475]: DEBUG nova.scheduler.client.report [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 649.615767] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507631, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.620155] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507632, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.648706] env[69475]: INFO nova.compute.manager [-] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Took 1.57 seconds to deallocate network for instance. [ 649.999650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.000099] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Instance network_info: |[{"id": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "address": "fa:16:3e:50:f7:f7", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bac6b08-29", "ovs_interfaceid": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 650.000569] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:f7:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4bac6b08-29dc-45f8-bd32-4adb28c6ea48', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 650.008858] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Creating folder: Project (47bcbe5bc3a14fbf9ea9617ea7d50342). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.009288] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1b2b26d-2d7e-4a15-a4f6-79b7a7f769a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.020542] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Created folder: Project (47bcbe5bc3a14fbf9ea9617ea7d50342) in parent group-v700823. [ 650.020777] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Creating folder: Instances. Parent ref: group-v700877. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 650.021262] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e9fca03-4366-41c8-b61d-b9aaf1ab56f9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.034321] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Successfully updated port: 70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 650.035604] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Created folder: Instances in parent group-v700877. [ 650.035696] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 650.036149] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 650.036977] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ea15e09-0566-4ca3-bfc1-de3766235c40 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.058929] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 650.058929] env[69475]: value = "task-3507635" [ 650.058929] env[69475]: _type = "Task" [ 650.058929] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.070851] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.071342] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 650.073815] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507635, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.077494] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.380s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.078967] env[69475]: INFO nova.compute.claims [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 650.106934] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507631, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.649044} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.113397] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c3db35f4-f43d-464c-9556-18a90866ee6a/c3db35f4-f43d-464c-9556-18a90866ee6a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 650.113752] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 650.114076] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2b9595f2-b441-4984-a0ee-6d0363724315 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.124824] env[69475]: DEBUG oslo_vmware.api [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507632, 'name': PowerOnVM_Task, 'duration_secs': 0.707052} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.126260] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 650.126491] env[69475]: INFO nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Took 12.92 seconds to spawn the instance on the hypervisor. [ 650.126687] env[69475]: DEBUG nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 650.127015] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 650.127015] env[69475]: value = "task-3507636" [ 650.127015] env[69475]: _type = "Task" [ 650.127015] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.128727] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba4b08a7-d7c7-4f90-ac48-8acacd3c373c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.144199] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507636, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.159212] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.230404] env[69475]: DEBUG nova.network.neutron [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Updated VIF entry in instance network info cache for port 00f2c4f8-7075-42f6-94f1-c09274ea941d. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 650.230698] env[69475]: DEBUG nova.network.neutron [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Updating instance_info_cache with network_info: [{"id": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "address": "fa:16:3e:14:ee:c4", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap00f2c4f8-70", "ovs_interfaceid": "00f2c4f8-7075-42f6-94f1-c09274ea941d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.504705] env[69475]: DEBUG nova.compute.manager [req-99f8d695-8302-4d91-9ea2-ec87b1f071a7 req-400cbebf-b8c3-4882-bdc4-f3a07723c5c8 service nova] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Received event network-vif-deleted-595d3b80-121a-4ab1-9ece-34303f1a5b18 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 650.537034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 
tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.537517] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquired lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.538015] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.571749] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507635, 'name': CreateVM_Task, 'duration_secs': 0.382536} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.571945] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 650.572677] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.572834] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.573237] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 650.574711] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5f12bd6-30fb-4742-b487-86049bec0e85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.579061] env[69475]: DEBUG nova.compute.utils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 650.583025] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 
tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 650.583025] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 650.585041] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 650.585041] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d45b5b-143f-7f4b-770d-9cc422a43d51" [ 650.585041] env[69475]: _type = "Task" [ 650.585041] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.597636] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d45b5b-143f-7f4b-770d-9cc422a43d51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.663032] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507636, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.497106} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.666376] env[69475]: DEBUG nova.policy [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9309c0eabe544a64afa0f8332a5b7abd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a4238057618546babe78b7b37966652e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 650.669911] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 650.670737] env[69475]: INFO nova.compute.manager [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Took 26.38 seconds to build instance. 
[ 650.673664] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66546e2-dabe-439f-8fdd-b693e228aa20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.720302] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] c3db35f4-f43d-464c-9556-18a90866ee6a/c3db35f4-f43d-464c-9556-18a90866ee6a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 650.720302] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87a96aad-1b20-477a-bd5c-d467ce45d630 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.747315] env[69475]: DEBUG oslo_concurrency.lockutils [req-0c228303-6f87-4449-b4af-87f8b114e5e0 req-223db7aa-3dd6-44df-996f-404b9d61891a service nova] Releasing lock "refresh_cache-c3db35f4-f43d-464c-9556-18a90866ee6a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.753854] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 650.753854] env[69475]: value = "task-3507637" [ 650.753854] env[69475]: _type = "Task" [ 650.753854] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.768428] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507637, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.057279] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.057542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.082500] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 651.110440] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.110440] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.111150] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d45b5b-143f-7f4b-770d-9cc422a43d51, 'name': SearchDatastore_Task, 'duration_secs': 0.031975} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.111569] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.112072] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 651.112072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.112253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.113690] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 651.113690] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7e124b3-3920-4b14-b60e-9eb554f54fe0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.115710] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 651.129019] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 651.129019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 651.129019] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a568662-f270-48bd-a595-cf0719352261 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.146289] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 651.146289] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52088581-9f25-8cee-9841-360b801ba99b" [ 651.146289] env[69475]: _type = "Task" [ 651.146289] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.155953] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52088581-9f25-8cee-9841-360b801ba99b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.181567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13b4fd48-0f3c-42d0-b981-987e8f45831a tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.900s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.269901] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507637, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.350266] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Successfully created port: 4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 651.509113] env[69475]: DEBUG nova.network.neutron [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Updating instance_info_cache with network_info: [{"id": "70facb17-ed67-480e-b18e-81f937380d97", "address": "fa:16:3e:dd:72:51", "network": {"id": "1c6ad9fe-22bf-42ef-9106-5dc662ea5c5a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1559876463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a21fd326b7d4424eba76e774925f5beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70facb17-ed", "ovs_interfaceid": "70facb17-ed67-480e-b18e-81f937380d97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.514059] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.516191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.516191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.516191] env[69475]: DEBUG 
oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.516191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.519952] env[69475]: INFO nova.compute.manager [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Terminating instance [ 651.660756] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52088581-9f25-8cee-9841-360b801ba99b, 'name': SearchDatastore_Task, 'duration_secs': 0.033643} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.661589] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b13a2b9f-67b4-44fe-9c93-873d540d5f58 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.671517] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f314c67c-a64a-4fe9-909d-41c6ff7f512f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.674229] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 651.674229] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca1e18-1841-131c-40d3-0acbe47cf23e" [ 651.674229] env[69475]: _type = "Task" [ 651.674229] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.680432] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d9d0509-c37e-4b6c-add3-9c9e5eceb115 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.687457] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 651.689881] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca1e18-1841-131c-40d3-0acbe47cf23e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.738356] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46759d6a-4947-4f08-8f1b-35d13c850039 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.747186] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2b84dd4-0981-4b0e-90d4-141dad5e9db6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.761890] env[69475]: DEBUG nova.compute.provider_tree [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 651.771362] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507637, 'name': ReconfigVM_Task, 'duration_secs': 0.57013} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.772253] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Reconfigured VM instance instance-00000012 to attach disk [datastore1] c3db35f4-f43d-464c-9556-18a90866ee6a/c3db35f4-f43d-464c-9556-18a90866ee6a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 651.772905] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04380610-0e2f-40d3-b5dc-45ce26a1e6b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.779196] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 651.779196] env[69475]: value = "task-3507638" [ 651.779196] env[69475]: _type = "Task" [ 651.779196] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.787817] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507638, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.015292] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Releasing lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.015802] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Instance network_info: |[{"id": "70facb17-ed67-480e-b18e-81f937380d97", "address": "fa:16:3e:dd:72:51", "network": {"id": "1c6ad9fe-22bf-42ef-9106-5dc662ea5c5a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1559876463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a21fd326b7d4424eba76e774925f5beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70facb17-ed", "ovs_interfaceid": "70facb17-ed67-480e-b18e-81f937380d97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 652.016522] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dd:72:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '669e4919-e0ad-4e23-9f23-4c5f2be0d858', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '70facb17-ed67-480e-b18e-81f937380d97', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 652.028129] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Creating folder: Project (a21fd326b7d4424eba76e774925f5beb). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.029022] env[69475]: DEBUG nova.compute.manager [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 652.029332] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 652.029630] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7aa3dd25-1fcf-4e9d-8e84-52931ddba050 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.032151] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afd7047-26a5-4a07-854f-9826476cfab3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.040563] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 652.041882] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db0cf062-0b56-4f71-a9f6-180b448da62b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.045368] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Created folder: Project (a21fd326b7d4424eba76e774925f5beb) in parent group-v700823. [ 652.046018] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Creating folder: Instances. Parent ref: group-v700880. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 652.046156] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4f889bae-f686-4724-9348-99b42dc82fe3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.050638] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 652.050638] env[69475]: value = "task-3507640" [ 652.050638] env[69475]: _type = "Task" [ 652.050638] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.055610] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Created folder: Instances in parent group-v700880. 
[ 652.055883] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 652.056447] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 652.056739] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6df903c-2f97-45da-873c-7ab137b86f71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.082583] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.090252] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 652.090252] env[69475]: value = "task-3507642" [ 652.090252] env[69475]: _type = "Task" [ 652.090252] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.098659] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507642, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.102032] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 652.133191] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 652.133756] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 652.134900] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 652.134900] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 652.134900] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 652.134900] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 652.134900] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 652.135377] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 652.135377] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 652.135377] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 652.135673] env[69475]: DEBUG nova.virt.hardware [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 652.136682] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f322027a-e723-4687-8543-10e38e0b0498 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.146880] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f96738b-f8ff-474e-91fd-c8d3812a723d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.184554] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca1e18-1841-131c-40d3-0acbe47cf23e, 'name': SearchDatastore_Task, 'duration_secs': 0.026249} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.184917] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.185249] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d1a316d5-59ef-4286-9d7e-a444ffadc49d/d1a316d5-59ef-4286-9d7e-a444ffadc49d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 652.185561] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c5fcdc2-6059-4293-a73c-de100134a342 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.194050] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 652.194050] env[69475]: value = "task-3507643" [ 652.194050] env[69475]: _type = "Task" [ 652.194050] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.205615] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507643, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.210467] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.267878] env[69475]: DEBUG nova.scheduler.client.report [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 652.292021] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507638, 'name': Rename_Task, 'duration_secs': 0.380646} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.292210] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 652.292498] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f950bfd9-395b-4539-98c2-2da67df9703e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.299639] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 652.299639] env[69475]: value = "task-3507644" [ 652.299639] env[69475]: _type = "Task" [ 652.299639] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.309093] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507644, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.567716] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507640, 'name': PowerOffVM_Task, 'duration_secs': 0.288304} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.569982] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 652.569982] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 652.571795] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received event network-vif-plugged-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.571795] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Acquiring lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 652.571795] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.571969] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.572144] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] No waiting events found dispatching network-vif-plugged-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 652.572383] env[69475]: WARNING nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received unexpected event network-vif-plugged-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 for instance with vm_state building and task_state spawning. 
[ 652.572548] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 652.572715] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing instance network info cache due to event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 652.573220] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Acquiring lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.573425] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Acquired lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.576840] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 652.576840] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0c6e81e-5198-4a11-8206-5023a51cc5c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.603159] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507642, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.649296] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 652.649652] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 652.650009] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Deleting the datastore file [datastore1] a22a4d65-56eb-4313-bd0e-81148981f5b8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 652.650339] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2acd8b72-79d9-4d5c-a107-09c439e520f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.658964] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for the task: (returnval){ [ 652.658964] env[69475]: value = "task-3507646" [ 652.658964] env[69475]: _type = "Task" [ 652.658964] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.671802] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507646, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.709567] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507643, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.775938] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.698s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.779031] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 652.781640] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.961s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 652.782793] env[69475]: DEBUG nova.objects.instance [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lazy-loading 'resources' on Instance uuid 67287947-ecce-4462-8268-23bcc7421766 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 652.814548] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507644, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.021497] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.022165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.022407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.022611] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.023771] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
653.029323] env[69475]: INFO nova.compute.manager [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Terminating instance [ 653.108144] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507642, 'name': CreateVM_Task, 'duration_secs': 0.827986} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.108144] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 653.109689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.109689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.109689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 653.109689] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99b41354-f46a-4022-9f61-f56746a679e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.116887] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 653.116887] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526cb85d-efc5-62ef-d2a0-41be5b8e8c38" [ 653.116887] env[69475]: _type = "Task" [ 653.116887] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.126732] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526cb85d-efc5-62ef-d2a0-41be5b8e8c38, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.172054] env[69475]: DEBUG oslo_vmware.api [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Task: {'id': task-3507646, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328944} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.172054] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 653.172054] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 653.172252] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 653.172292] env[69475]: INFO nova.compute.manager [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 653.173626] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.173626] env[69475]: DEBUG nova.compute.manager [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 653.173626] env[69475]: DEBUG nova.network.neutron [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 653.207318] env[69475]: DEBUG nova.compute.manager [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Received event network-vif-plugged-70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.207535] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Acquiring lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 653.207739] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 653.207898] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 653.211300] env[69475]: DEBUG nova.compute.manager [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] No waiting events found dispatching network-vif-plugged-70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 653.211668] env[69475]: WARNING nova.compute.manager [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Received unexpected event network-vif-plugged-70facb17-ed67-480e-b18e-81f937380d97 for instance with vm_state building and task_state spawning. [ 653.211755] env[69475]: DEBUG nova.compute.manager [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Received event network-changed-70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 653.211907] env[69475]: DEBUG nova.compute.manager [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Refreshing instance network info cache due to event network-changed-70facb17-ed67-480e-b18e-81f937380d97. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 653.212131] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Acquiring lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.212281] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Acquired lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.212423] env[69475]: DEBUG nova.network.neutron [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Refreshing network info cache for port 70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 653.218333] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507643, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.870893} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.218609] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d1a316d5-59ef-4286-9d7e-a444ffadc49d/d1a316d5-59ef-4286-9d7e-a444ffadc49d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 653.218825] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 653.219096] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac5c0c0d-e78b-4cbf-a4a9-974542108bd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.229042] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 653.229042] env[69475]: value = "task-3507647" [ 653.229042] env[69475]: _type = "Task" [ 653.229042] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.245504] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507647, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.287692] env[69475]: DEBUG nova.compute.utils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 653.294012] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 653.294206] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 653.316739] env[69475]: DEBUG oslo_vmware.api [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507644, 'name': PowerOnVM_Task, 'duration_secs': 0.654684} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.317118] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 653.318111] env[69475]: INFO nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Took 13.63 seconds to spawn the instance on the hypervisor. 
[ 653.318111] env[69475]: DEBUG nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 653.319285] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865dcd6c-0d11-4d16-bd58-903c940602bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.460879] env[69475]: DEBUG nova.policy [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35701016696a4f57a1c34462e46e99d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02d595a3575a40799470947426047e69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 653.536185] env[69475]: DEBUG nova.compute.manager [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 653.536185] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 653.537118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256bc3f4-809a-4ff0-bb4a-9c77449a84c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.547960] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 653.548336] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7fecde39-5927-49e8-9acd-96b1b590923c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.559111] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 653.559111] env[69475]: value = "task-3507648" [ 653.559111] env[69475]: _type = "Task" [ 653.559111] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.573982] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507648, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.633998] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526cb85d-efc5-62ef-d2a0-41be5b8e8c38, 'name': SearchDatastore_Task, 'duration_secs': 0.041333} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.636529] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.636776] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 653.637012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.637169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 653.637350] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 653.637817] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e23d4585-e103-4847-b068-87fc99b93673 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.647473] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 
tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 653.647670] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 653.648431] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-916c3599-85c8-4cfd-bc32-8f330fd07c05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.657833] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 653.657833] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d364e-4239-29b4-f8f6-8659fcd69e99" [ 653.657833] env[69475]: _type = "Task" [ 653.657833] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.672257] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d364e-4239-29b4-f8f6-8659fcd69e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.739442] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507647, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064739} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.739853] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 653.740607] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d572446a-ae96-415f-bc2a-3d43ebe193cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.769133] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Reconfiguring VM instance instance-00000013 to attach disk [datastore2] d1a316d5-59ef-4286-9d7e-a444ffadc49d/d1a316d5-59ef-4286-9d7e-a444ffadc49d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 653.772137] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-356de322-2c69-4167-9e92-0da9c5b713a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.791647] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 653.795809] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 653.795809] env[69475]: value = "task-3507649" [ 653.795809] env[69475]: _type = "Task" [ 653.795809] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.804784] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507649, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.848156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8a31ee-e92f-4f25-b934-d3a87e6f661f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.853508] env[69475]: INFO nova.compute.manager [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Took 28.45 seconds to build instance. 
[ 653.859183] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6192825c-c8ff-405c-9da2-bf8db2c28944 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.899139] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Successfully updated port: 4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 653.901988] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbcb52f9-85b9-4d4e-ae83-b9d334044007 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.910807] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092dc24d-d653-459e-935b-bd2b621923d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.928370] env[69475]: DEBUG nova.compute.provider_tree [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.930381] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updated VIF entry in instance network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 653.930714] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updating instance_info_cache with network_info: [{"id": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "address": "fa:16:3e:50:f7:f7", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bac6b08-29", "ovs_interfaceid": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.068717] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507648, 'name': PowerOffVM_Task, 'duration_secs': 0.417655} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.069737] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 654.069737] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 654.069737] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1963004-99fd-4258-aef2-b8b2f5d14993 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.155213] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 654.155213] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 654.155213] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore1] 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 654.155213] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3adaa858-5a63-4e13-8af4-9c7b95758886 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.169296] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 654.169296] env[69475]: value = "task-3507651" [ 654.169296] env[69475]: _type = "Task" [ 654.169296] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.180250] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d364e-4239-29b4-f8f6-8659fcd69e99, 'name': SearchDatastore_Task, 'duration_secs': 0.023687} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.186188] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40d0b3c9-4581-4093-a455-8b782a3bd90b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.189132] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507651, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.193263] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 654.193263] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d17cdd-4519-8152-9722-ab675144bf45" [ 654.193263] env[69475]: _type = "Task" [ 654.193263] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.205823] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d17cdd-4519-8152-9722-ab675144bf45, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.243441] env[69475]: DEBUG nova.network.neutron [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Updated VIF entry in instance network info cache for port 70facb17-ed67-480e-b18e-81f937380d97. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 654.243754] env[69475]: DEBUG nova.network.neutron [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Updating instance_info_cache with network_info: [{"id": "70facb17-ed67-480e-b18e-81f937380d97", "address": "fa:16:3e:dd:72:51", "network": {"id": "1c6ad9fe-22bf-42ef-9106-5dc662ea5c5a", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1559876463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "a21fd326b7d4424eba76e774925f5beb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "669e4919-e0ad-4e23-9f23-4c5f2be0d858", "external-id": "nsx-vlan-transportzone-362", "segmentation_id": 362, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap70facb17-ed", "ovs_interfaceid": "70facb17-ed67-480e-b18e-81f937380d97", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.315515] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507649, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.358301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a10d71f-851f-4d9c-b69b-65106ddf74b8 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.962s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.410440] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.410440] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.410440] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 654.434282] env[69475]: DEBUG nova.scheduler.client.report [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 654.438306] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Releasing lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.438306] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Received event network-changed-29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 654.438306] env[69475]: DEBUG nova.compute.manager [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Refreshing instance network info cache due to event network-changed-29b2e26b-edae-4c53-98e5-15ce643aa4d0. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 654.438484] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Acquiring lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.439852] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Acquired lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.439852] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Refreshing network info cache for port 29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 654.568401] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Successfully created port: 5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 654.689917] env[69475]: DEBUG nova.network.neutron [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.691675] env[69475]: DEBUG oslo_vmware.api [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154648} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.694084] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 654.694084] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 654.694084] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 654.694084] env[69475]: INFO nova.compute.manager [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 654.694084] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 654.694509] env[69475]: DEBUG nova.compute.manager [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 654.694546] env[69475]: DEBUG nova.network.neutron [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 654.709265] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d17cdd-4519-8152-9722-ab675144bf45, 'name': SearchDatastore_Task, 'duration_secs': 0.009615} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.710668] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.712197] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3c253a57-1c93-4e8d-aaa1-1331c0547d85/3c253a57-1c93-4e8d-aaa1-1331c0547d85.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 654.712197] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6625f89f-921a-4ac8-a2fb-52104cdb7fae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.721127] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 654.721127] env[69475]: value = "task-3507652" [ 654.721127] env[69475]: _type = "Task" [ 654.721127] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.730247] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507652, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.747578] env[69475]: DEBUG oslo_concurrency.lockutils [req-021c95a4-8243-4fa2-afa6-768937351246 req-f6011f4c-1765-43e1-97e7-5e95583cfe7f service nova] Releasing lock "refresh_cache-3c253a57-1c93-4e8d-aaa1-1331c0547d85" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.803152] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 654.824015] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507649, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.833719] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 654.834192] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 654.834192] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 654.834406] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 654.834580] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 654.834772] env[69475]: DEBUG nova.virt.hardware [None 
req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 654.835166] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 654.835379] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 654.835549] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 654.835749] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 654.835956] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 654.836803] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4795b7cb-847f-4687-afca-ecfabc1ec9c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.844568] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07480473-021d-457a-8da6-22808babe2a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.862673] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 654.944971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.162s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.947986] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.682s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.950254] env[69475]: INFO nova.compute.claims [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.978700] env[69475]: INFO nova.scheduler.client.report [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Deleted allocations for instance 67287947-ecce-4462-8268-23bcc7421766 [ 655.034426] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 655.070550] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "ed12921f-9be8-474d-958e-79dd16b8116e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.073071] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 655.199585] env[69475]: INFO nova.compute.manager [-] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Took 2.03 seconds to deallocate network for instance. [ 655.244383] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.470684} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.244383] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3c253a57-1c93-4e8d-aaa1-1331c0547d85/3c253a57-1c93-4e8d-aaa1-1331c0547d85.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 655.244383] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 655.244383] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56dc5ad7-8515-4795-90a9-e749b3bb8045 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.253725] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 655.253725] env[69475]: value = "task-3507653" [ 655.253725] env[69475]: _type = "Task" [ 655.253725] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.267679] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507653, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.314473] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507649, 'name': ReconfigVM_Task, 'duration_secs': 1.276633} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.319338] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Reconfigured VM instance instance-00000013 to attach disk [datastore2] d1a316d5-59ef-4286-9d7e-a444ffadc49d/d1a316d5-59ef-4286-9d7e-a444ffadc49d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 655.319338] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf636261-6d3d-49bd-a258-8336b4b4a86d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.325985] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 655.325985] env[69475]: value = "task-3507654" [ 655.325985] env[69475]: _type = "Task" [ 655.325985] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.334884] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507654, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.395837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.467084] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updated VIF entry in instance network info cache for port 29b2e26b-edae-4c53-98e5-15ce643aa4d0. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 655.467084] env[69475]: DEBUG nova.network.neutron [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updating instance_info_cache with network_info: [{"id": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "address": "fa:16:3e:f1:c1:a3", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.243", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap29b2e26b-ed", "ovs_interfaceid": "29b2e26b-edae-4c53-98e5-15ce643aa4d0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.492145] env[69475]: DEBUG oslo_concurrency.lockutils [None req-128ada5b-ac78-4b91-8c45-052c71276497 tempest-ServersAaction247Test-1651859093 tempest-ServersAaction247Test-1651859093-project-member] Lock "67287947-ecce-4462-8268-23bcc7421766" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.036s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.511156] env[69475]: DEBUG nova.network.neutron [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Updating instance_info_cache with network_info: [{"id": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "address": "fa:16:3e:1b:f2:66", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4728a8-f4", "ovs_interfaceid": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.709492] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 655.767858] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507653, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073621} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.767858] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.767858] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6eb0d67-2a25-452c-b43b-8ede795135a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.797174] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] 3c253a57-1c93-4e8d-aaa1-1331c0547d85/3c253a57-1c93-4e8d-aaa1-1331c0547d85.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.799433] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-340e0efd-afda-486b-bc75-d08099b5ad1e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.824397] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 655.824397] env[69475]: value = "task-3507655" [ 655.824397] env[69475]: _type = "Task" [ 655.824397] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.839647] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507655, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.840784] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507654, 'name': Rename_Task, 'duration_secs': 0.150242} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.841247] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 655.841681] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a7cc89aa-46c6-465f-b668-d1e7c3ea36d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.848072] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 655.848072] env[69475]: value = "task-3507656" [ 655.848072] env[69475]: _type = "Task" [ 655.848072] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.851976] env[69475]: DEBUG nova.network.neutron [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 655.859769] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507656, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.972341] env[69475]: DEBUG oslo_concurrency.lockutils [req-de1795fd-68da-4af5-b76d-5d97fa9b80b7 req-24f8c233-5339-46db-ab17-0e63a035a59b service nova] Releasing lock "refresh_cache-93607154-f135-4925-9c3a-a97051535b00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.017632] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.017953] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Instance network_info: |[{"id": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "address": "fa:16:3e:1b:f2:66", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4728a8-f4", "ovs_interfaceid": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 656.019477] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:f2:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c4728a8-f4a8-44f7-9492-8e43fbf061ae', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 656.030099] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 656.030369] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 656.030452] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87b53b30-d22d-40dc-9567-fdf10f876233 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.056633] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 656.056633] env[69475]: value = "task-3507657" [ 656.056633] env[69475]: _type = "Task" [ 656.056633] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.067058] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507657, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.229317] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.229724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.230850] env[69475]: DEBUG nova.compute.manager [req-176ee384-78b6-4ec0-adce-aae60168af7b req-60476b5c-1b2b-4186-81cc-54223e80e0d5 service nova] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Received event network-vif-deleted-b9c9e750-9412-44e9-9898-efc2a703f86b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 656.243370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.243370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.341866] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 
tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507655, 'name': ReconfigVM_Task, 'duration_secs': 0.291628} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.341866] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Reconfigured VM instance instance-00000014 to attach disk [datastore2] 3c253a57-1c93-4e8d-aaa1-1331c0547d85/3c253a57-1c93-4e8d-aaa1-1331c0547d85.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.341866] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-413c4624-5e7c-474b-85ea-7f4563f511bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.349176] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 656.349176] env[69475]: value = "task-3507658" [ 656.349176] env[69475]: _type = "Task" [ 656.349176] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.358976] env[69475]: INFO nova.compute.manager [-] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Took 1.66 seconds to deallocate network for instance. [ 656.374659] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507656, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.379142] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507658, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.551138] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409dca7c-c41c-4a92-af22-b86c6efd46ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.561561] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-223e96dd-4b32-4fbb-ae01-159d74ea556a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.572914] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507657, 'name': CreateVM_Task, 'duration_secs': 0.462122} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.598667] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 656.600097] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.600357] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.600571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 656.601419] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e20ca8-3549-41de-b3b7-a467f1c1389c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.604057] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eabd1207-77cd-4d8a-8be6-7799c10184ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.608995] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 656.608995] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c5f76d-b2d1-8280-0ede-1d9b37e255c1" [ 656.608995] env[69475]: _type = "Task" [ 656.608995] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.616613] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d0ee3e-4645-45f9-a039-0f947c48f85d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.642228] env[69475]: DEBUG nova.compute.provider_tree [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.643673] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c5f76d-b2d1-8280-0ede-1d9b37e255c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.644151] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 656.644377] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 656.644605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.644745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.644917] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 656.645184] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9942dac5-ef38-4a11-af69-608f20cef211 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.653497] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 656.654124] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 656.654516] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-99481a17-1889-4744-940c-39c2eee6edf7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.663659] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 656.663659] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522c6442-f23b-79f8-fc34-aaaf442574ae" [ 656.663659] env[69475]: _type = "Task" [ 656.663659] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.673339] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522c6442-f23b-79f8-fc34-aaaf442574ae, 'name': SearchDatastore_Task, 'duration_secs': 0.008888} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.674169] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2263e8d-79de-4194-adda-a809789e1937 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.679267] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 656.679267] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf02e-c253-9ef9-8ef4-25eddd3f1fb9" [ 656.679267] env[69475]: _type = "Task" [ 656.679267] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.687372] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf02e-c253-9ef9-8ef4-25eddd3f1fb9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.695693] env[69475]: DEBUG nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Received event network-vif-plugged-4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 656.695693] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Acquiring lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.695693] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.695693] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.695693] env[69475]: DEBUG nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] No waiting events found dispatching network-vif-plugged-4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 656.695965] env[69475]: WARNING nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Received unexpected event network-vif-plugged-4c4728a8-f4a8-44f7-9492-8e43fbf061ae for instance with vm_state building and task_state spawning. [ 656.695965] env[69475]: DEBUG nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Received event network-changed-4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 656.695965] env[69475]: DEBUG nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Refreshing instance network info cache due to event network-changed-4c4728a8-f4a8-44f7-9492-8e43fbf061ae. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 656.695965] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Acquiring lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 656.695965] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Acquired lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 656.696155] env[69475]: DEBUG nova.network.neutron [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Refreshing network info cache for port 4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 656.861525] env[69475]: DEBUG oslo_vmware.api [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507656, 'name': PowerOnVM_Task, 'duration_secs': 0.665808} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.864774] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 656.864984] env[69475]: INFO nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Took 12.31 seconds to spawn the instance on the hypervisor. [ 656.868807] env[69475]: DEBUG nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.869934] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.870604] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3cd902-9657-4e5c-b59a-80c374c71afa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.880866] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507658, 'name': Rename_Task, 'duration_secs': 0.167633} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.883607] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 656.883854] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e97b46f6-5090-4dce-bdd0-98bcab2299d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.890245] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 656.890245] env[69475]: value = "task-3507659" [ 656.890245] env[69475]: _type = "Task" [ 656.890245] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.899184] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507659, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.954919] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Successfully updated port: 5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 657.145956] env[69475]: DEBUG nova.scheduler.client.report [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 657.190305] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf02e-c253-9ef9-8ef4-25eddd3f1fb9, 'name': SearchDatastore_Task, 'duration_secs': 0.007931} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.190569] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 657.190881] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 77a5665d-b00f-42c2-a1e8-319dfd232b06/77a5665d-b00f-42c2-a1e8-319dfd232b06.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 657.191083] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d7987f6f-dade-4887-8af1-eedab9ff20ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.201799] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 657.201799] env[69475]: value = "task-3507660" [ 657.201799] env[69475]: _type = "Task" [ 657.201799] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.211970] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507660, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.396863] env[69475]: INFO nova.compute.manager [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Took 30.40 seconds to build instance. [ 657.406526] env[69475]: DEBUG oslo_vmware.api [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507659, 'name': PowerOnVM_Task, 'duration_secs': 0.50088} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.406526] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 657.406526] env[69475]: INFO nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Took 10.14 seconds to spawn the instance on the hypervisor. [ 657.406526] env[69475]: DEBUG nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 657.406861] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227b0ad5-fe4d-4384-a5a3-1297eb9fd5b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.458481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.458828] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 657.458910] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 657.571291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.571291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.651144] 
env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.703s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.651816] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 657.655081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.935s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.656890] env[69475]: DEBUG nova.objects.instance [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lazy-loading 'resources' on Instance uuid af5dc581-cf6a-4b84-8bcf-96606ae07cc1 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.716802] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507660, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.755100] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c522044-eec9-4a8b-bacb-2c12a46ac71d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.762016] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Suspending the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 657.762296] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-339de041-f0c1-420c-a8b2-179b255350c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.768406] env[69475]: DEBUG oslo_vmware.api [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] Waiting for the task: (returnval){ [ 657.768406] env[69475]: value = "task-3507661" [ 657.768406] env[69475]: _type = "Task" [ 657.768406] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.776982] env[69475]: DEBUG oslo_vmware.api [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] Task: {'id': task-3507661, 'name': SuspendVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.902925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-83b4af0d-9873-45ef-8100-bc48fab162b8 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.916s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.939268] env[69475]: INFO nova.compute.manager [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Took 30.83 seconds to build instance. [ 658.106982] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 658.111183] env[69475]: DEBUG nova.network.neutron [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Updated VIF entry in instance network info cache for port 4c4728a8-f4a8-44f7-9492-8e43fbf061ae. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 658.113147] env[69475]: DEBUG nova.network.neutron [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Updating instance_info_cache with network_info: [{"id": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "address": "fa:16:3e:1b:f2:66", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.219", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c4728a8-f4", "ovs_interfaceid": "4c4728a8-f4a8-44f7-9492-8e43fbf061ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.162215] env[69475]: DEBUG nova.compute.utils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 658.171022] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 658.171022] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 658.222537] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507660, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.278348] env[69475]: DEBUG oslo_vmware.api [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] Task: {'id': task-3507661, 'name': SuspendVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.294275] env[69475]: DEBUG nova.policy [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35701016696a4f57a1c34462e46e99d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02d595a3575a40799470947426047e69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 658.405936] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.443558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9273a6af-d986-4d64-bfb3-4c63b904bb43 tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.363s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.484347] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Updating instance_info_cache with network_info: [{"id": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "address": "fa:16:3e:0f:4c:48", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fa5b65a-d1", "ovs_interfaceid": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.619155] env[69475]: DEBUG oslo_concurrency.lockutils [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] Releasing lock "refresh_cache-77a5665d-b00f-42c2-a1e8-319dfd232b06" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.619505] env[69475]: DEBUG nova.compute.manager [req-3b679b48-2ef8-4073-848f-2c9f22b36b37 req-13226f99-cdb3-41c4-a524-f9e1af22af81 service nova] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Received event network-vif-deleted-242cecca-1cdb-42f1-92c0-0717cd78b7eb {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 658.671246] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 658.715994] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507660, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.717901] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dd8196-99f0-47d2-90c1-3f21393c0f18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.730406] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e601ca-062f-47e2-a6a2-833c60afa1ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.766608] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a46532-0b02-4284-86a7-c4499302069e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.780779] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf615ee-c55f-4f97-bb63-28bf364eef77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.785265] env[69475]: DEBUG oslo_vmware.api [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] Task: {'id': task-3507661, 'name': SuspendVM_Task, 'duration_secs': 0.918746} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 658.785877] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Suspended the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 658.786067] env[69475]: DEBUG nova.compute.manager [None req-964e66af-29dc-4796-8ef4-316ceb20f14e tempest-ServersAdminNegativeTestJSON-761000828 tempest-ServersAdminNegativeTestJSON-761000828-project-admin] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 658.787605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed3bd7e-210e-425d-a215-45bcde22aeb5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.798896] env[69475]: DEBUG nova.compute.provider_tree [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.914179] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Successfully created port: a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 658.940538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 658.946952] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 658.989150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 658.989484] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Instance network_info: |[{"id": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "address": "fa:16:3e:0f:4c:48", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fa5b65a-d1", "ovs_interfaceid": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 658.989906] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:4c:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fa5b65a-d1fb-4e45-8fea-68beefb4f999', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 658.999326] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating folder: Project (02d595a3575a40799470947426047e69). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.000561] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d487cb73-7955-4aad-9d81-11f71cfdbb89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.011869] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created folder: Project (02d595a3575a40799470947426047e69) in parent group-v700823. 
[ 659.012079] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating folder: Instances. Parent ref: group-v700884. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 659.012388] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb7cf09b-c33d-47cf-9270-7e80ee747ba1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.023427] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created folder: Instances in parent group-v700884. [ 659.023427] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 659.025643] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 659.025643] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7bb377c9-7e3a-4f13-aa5e-39cd47dc21bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.047115] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 659.047115] env[69475]: value = "task-3507664" [ 659.047115] env[69475]: _type = "Task" [ 659.047115] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.055164] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507664, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.206501] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.206763] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.224984] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507660, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.595052} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.225320] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 77a5665d-b00f-42c2-a1e8-319dfd232b06/77a5665d-b00f-42c2-a1e8-319dfd232b06.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 659.225547] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 659.230044] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce3eb625-7f88-4dfa-8949-57fd2ee011e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.235432] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 659.235432] env[69475]: value = "task-3507665" [ 659.235432] env[69475]: _type = "Task" [ 659.235432] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.246633] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507665, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.302928] env[69475]: DEBUG nova.scheduler.client.report [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.355399] env[69475]: DEBUG nova.compute.manager [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Received event network-vif-plugged-5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.355399] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Acquiring lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.355399] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.355841] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.356410] env[69475]: DEBUG nova.compute.manager [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] No waiting events found dispatching network-vif-plugged-5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 659.356749] env[69475]: WARNING nova.compute.manager [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Received unexpected event network-vif-plugged-5fa5b65a-d1fb-4e45-8fea-68beefb4f999 for instance with vm_state building and task_state spawning. 
[ 659.358013] env[69475]: DEBUG nova.compute.manager [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Received event network-changed-5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.358229] env[69475]: DEBUG nova.compute.manager [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Refreshing instance network info cache due to event network-changed-5fa5b65a-d1fb-4e45-8fea-68beefb4f999. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 659.358442] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Acquiring lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.358580] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Acquired lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.358743] env[69475]: DEBUG nova.network.neutron [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Refreshing network info cache for port 5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 659.478939] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.563184] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507664, 'name': CreateVM_Task, 'duration_secs': 0.482628} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.563184] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 659.563450] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.563712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.563942] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 659.564155] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-296a72a6-a9ec-454d-b675-b62b939d77b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.569427] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 659.569427] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52027db8-625b-30ce-7e0c-30c76d233f29" [ 659.569427] env[69475]: _type = "Task" [ 659.569427] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.578030] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52027db8-625b-30ce-7e0c-30c76d233f29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.683956] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 659.710689] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 659.710939] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 659.711110] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 659.711294] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 659.711485] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 659.711645] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 659.711869] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 659.712100] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 659.712190] env[69475]: DEBUG nova.virt.hardware [None 
req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 659.712352] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 659.712526] env[69475]: DEBUG nova.virt.hardware [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 659.713479] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da22519e-8883-4eed-bc9e-40d0a7b28a77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.722072] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e332c4-05f3-4f8f-83d6-787f4dda648f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.744826] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.745680] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 659.746678] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b604b362-36f2-416c-97da-aefc589bb10f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.770486] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Reconfiguring VM instance instance-00000015 to attach disk [datastore2] 77a5665d-b00f-42c2-a1e8-319dfd232b06/77a5665d-b00f-42c2-a1e8-319dfd232b06.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 659.770807] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ffa0328-f42a-4b1d-a9c0-ced541430c1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.790777] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 659.790777] env[69475]: value = "task-3507666" [ 659.790777] env[69475]: _type = "Task" [ 659.790777] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.799974] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507666, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.807858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.153s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.810766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.286s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.811902] env[69475]: INFO nova.compute.claims [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 659.828188] env[69475]: INFO nova.scheduler.client.report [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Deleted allocations for instance af5dc581-cf6a-4b84-8bcf-96606ae07cc1 [ 660.083539] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52027db8-625b-30ce-7e0c-30c76d233f29, 'name': SearchDatastore_Task, 'duration_secs': 0.015727} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.088190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.088190] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 660.088190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 660.088322] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 660.088456] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.088735] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d092f25-2f27-4822-b0be-42ee3ce8b53e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.100625] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.101616] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 660.101905] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d38b2d9a-1d8b-4c77-9fd0-a597ce9d772c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.109100] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 660.109100] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52da592f-6590-624b-6a43-14f220520fd1" [ 660.109100] env[69475]: _type = "Task" [ 660.109100] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.119574] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52da592f-6590-624b-6a43-14f220520fd1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.200101] env[69475]: DEBUG nova.network.neutron [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Updated VIF entry in instance network info cache for port 5fa5b65a-d1fb-4e45-8fea-68beefb4f999. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 660.200101] env[69475]: DEBUG nova.network.neutron [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Updating instance_info_cache with network_info: [{"id": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "address": "fa:16:3e:0f:4c:48", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fa5b65a-d1", "ovs_interfaceid": "5fa5b65a-d1fb-4e45-8fea-68beefb4f999", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.306807] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507666, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.338154] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4caa2ba3-56b4-464b-873e-49032068488c tempest-ServerExternalEventsTest-1377643665 tempest-ServerExternalEventsTest-1377643665-project-member] Lock "af5dc581-cf6a-4b84-8bcf-96606ae07cc1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.984s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.626723] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52da592f-6590-624b-6a43-14f220520fd1, 'name': SearchDatastore_Task, 'duration_secs': 0.01173} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.626723] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24c837f4-8371-432b-bf68-4e72df0d17ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.632052] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 660.632052] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529c0722-329a-7ec6-f23a-46195b653d7b" [ 660.632052] env[69475]: _type = "Task" [ 660.632052] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.642603] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529c0722-329a-7ec6-f23a-46195b653d7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.703371] env[69475]: DEBUG oslo_concurrency.lockutils [req-6c65e73f-0af7-4898-84ec-7b05d4074985 req-e511968a-1756-40f5-b316-a3b057e59af1 service nova] Releasing lock "refresh_cache-3eda17da-111c-412d-9af4-d3a40b7d8faa" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.802714] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507666, 'name': ReconfigVM_Task, 'duration_secs': 0.542175} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.802996] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Reconfigured VM instance instance-00000015 to attach disk [datastore2] 77a5665d-b00f-42c2-a1e8-319dfd232b06/77a5665d-b00f-42c2-a1e8-319dfd232b06.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 660.803941] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f38022e-58a7-461f-bbea-2b7557b5a422 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.812300] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 660.812300] env[69475]: value = "task-3507667" [ 660.812300] env[69475]: _type = "Task" [ 660.812300] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.828143] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507667, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.159499] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529c0722-329a-7ec6-f23a-46195b653d7b, 'name': SearchDatastore_Task, 'duration_secs': 0.033198} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.159973] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.159973] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3eda17da-111c-412d-9af4-d3a40b7d8faa/3eda17da-111c-412d-9af4-d3a40b7d8faa.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 661.160314] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5079c5f8-1c2d-4fae-bb9b-9c5c039120a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.167000] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 661.167000] env[69475]: value = "task-3507672" [ 661.167000] env[69475]: _type = "Task" [ 661.167000] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.179144] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.229346] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Successfully updated port: a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 661.328740] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507667, 'name': Rename_Task, 'duration_secs': 0.489836} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.329343] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 661.329717] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c4f0aa1c-82aa-40ec-aa73-3afb33608429 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.339829] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 661.339829] env[69475]: value = "task-3507673" [ 661.339829] env[69475]: _type = "Task" [ 661.339829] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.349789] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507673, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.408859] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84064676-dd22-4a2c-a458-d86d4c69c756 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.422301] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde2552d-31b1-46b0-b237-0c98c779fb8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.457695] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba77c4c3-6e6f-41e8-b489-7c20f5399cea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.466525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46427763-4ea2-45fb-b023-70e9143ad5b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.480796] env[69475]: DEBUG nova.compute.provider_tree [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.553344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.553678] 
env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.553950] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.554139] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.555261] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 661.557884] env[69475]: INFO nova.compute.manager [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Terminating instance [ 661.679703] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507672, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.733442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.733598] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.733836] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 661.802882] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 661.803480] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 661.850657] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507673, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.984399] env[69475]: DEBUG nova.scheduler.client.report [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.062561] env[69475]: DEBUG nova.compute.manager [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 662.062561] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 662.064474] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52351aef-2aef-4ac7-a26c-bed1f7c365be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.072593] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 662.074237] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e647b48e-3134-4463-9013-b25ba8ad0e26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.081341] env[69475]: DEBUG oslo_vmware.api [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 662.081341] env[69475]: value = "task-3507674" [ 662.081341] env[69475]: _type = "Task" [ 662.081341] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.090094] env[69475]: DEBUG oslo_vmware.api [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507674, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.118583] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.118583] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing instance network info cache due to event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 662.118583] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquiring lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 662.118583] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquired lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 662.118583] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 662.185322] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584384} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.185536] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3eda17da-111c-412d-9af4-d3a40b7d8faa/3eda17da-111c-412d-9af4-d3a40b7d8faa.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 662.185605] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 662.185831] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-225750ec-4d19-4534-8f8f-5514998ff3be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.193311] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 662.193311] env[69475]: value = "task-3507675" [ 662.193311] env[69475]: _type = "Task" [ 662.193311] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.203445] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507675, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.330087] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 662.355317] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507673, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.490239] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.490836] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 662.495320] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.731s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.495320] env[69475]: INFO nova.compute.claims [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.591232] env[69475]: DEBUG oslo_vmware.api [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507674, 'name': PowerOffVM_Task, 'duration_secs': 0.262839} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.591618] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.591792] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 662.592061] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3fb9ad22-9d69-427e-b739-03cb2e50f3ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.642948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.643593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.671012] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 662.671261] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 662.671425] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Deleting the datastore file [datastore2] 3c253a57-1c93-4e8d-aaa1-1331c0547d85 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 662.671705] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb98656e-0a11-4b7c-a630-3f704a965565 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.677769] env[69475]: DEBUG oslo_vmware.api 
[None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for the task: (returnval){ [ 662.677769] env[69475]: value = "task-3507677" [ 662.677769] env[69475]: _type = "Task" [ 662.677769] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.683678] env[69475]: DEBUG nova.network.neutron [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Updating instance_info_cache with network_info: [{"id": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "address": "fa:16:3e:ab:79:eb", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9de04f5-60", "ovs_interfaceid": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.689018] env[69475]: DEBUG oslo_vmware.api [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507677, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.712097] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507675, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075218} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.712496] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 662.713628] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dba3ccf-2287-4392-9294-9c98032596a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.738613] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Reconfiguring VM instance instance-00000016 to attach disk [datastore2] 3eda17da-111c-412d-9af4-d3a40b7d8faa/3eda17da-111c-412d-9af4-d3a40b7d8faa.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 662.739056] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6eaafa40-6946-4a8b-bcad-797e0c8bfe2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.759515] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 662.759515] env[69475]: value = "task-3507678" [ 662.759515] env[69475]: _type = "Task" [ 662.759515] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.767976] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507678, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.853723] env[69475]: DEBUG oslo_vmware.api [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507673, 'name': PowerOnVM_Task, 'duration_secs': 1.50355} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.854089] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 662.854333] env[69475]: INFO nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Took 10.75 seconds to spawn the instance on the hypervisor. 
[ 662.854573] env[69475]: DEBUG nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 662.855439] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6846b946-19bb-4d3b-8929-e2a5392ff775 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.003030] env[69475]: DEBUG nova.compute.utils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 663.007063] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 663.007063] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 663.141789] env[69475]: DEBUG nova.policy [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '75890ebcdfd74806a99aa2924fc82074', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c112f0c0629d4bf9a01e59342b38da87', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 663.187467] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.188279] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Instance network_info: |[{"id": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "address": "fa:16:3e:ab:79:eb", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9de04f5-60", "ovs_interfaceid": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 663.188279] env[69475]: DEBUG oslo_vmware.api [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Task: {'id': task-3507677, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245489} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.188473] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ab:79:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9de04f5-6001-4dc3-a305-3afbdf2429c6', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.196154] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.196385] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 663.196565] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 663.196745] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.196913] env[69475]: INFO nova.compute.manager [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Took 1.13 seconds to destroy the instance on the hypervisor. [ 663.197148] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 663.197334] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 663.197519] env[69475]: DEBUG nova.compute.manager [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 663.197610] env[69475]: DEBUG nova.network.neutron [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 663.199727] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b098eec-6707-49f5-a934-06c64352c356 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.222718] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.222718] env[69475]: value = "task-3507679" [ 663.222718] env[69475]: _type = "Task" [ 663.222718] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.231253] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507679, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.271791] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507678, 'name': ReconfigVM_Task, 'duration_secs': 0.290421} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.272152] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Reconfigured VM instance instance-00000016 to attach disk [datastore2] 3eda17da-111c-412d-9af4-d3a40b7d8faa/3eda17da-111c-412d-9af4-d3a40b7d8faa.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 663.273504] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d2bce4b-5827-4d38-9f81-1ac71c42cbef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.280571] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 663.280571] env[69475]: value = "task-3507680" [ 663.280571] env[69475]: _type = "Task" [ 663.280571] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.290716] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507680, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.373234] env[69475]: INFO nova.compute.manager [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Took 34.58 seconds to build instance. [ 663.504444] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 663.617994] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updated VIF entry in instance network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 663.620315] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updating instance_info_cache with network_info: [{"id": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "address": "fa:16:3e:50:f7:f7", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bac6b08-29", "ovs_interfaceid": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.738037] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507679, 'name': CreateVM_Task, 'duration_secs': 0.36328} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.738037] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.738670] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.738835] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.739213] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.739481] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fac02595-9e60-49cc-99c3-83aa0100f89c {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.746177] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 663.746177] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522e9fa9-3b4e-73fb-1218-b3fdc4bd6fa0" [ 663.746177] env[69475]: _type = "Task" [ 663.746177] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.761048] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522e9fa9-3b4e-73fb-1218-b3fdc4bd6fa0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.771817] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.774050] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.790935] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507680, 'name': Rename_Task, 'duration_secs': 0.145566} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.794021] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 663.794488] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d1de38d-4c64-4b8e-9fa6-72beb3e2cc20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.801875] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 663.801875] env[69475]: value = "task-3507681" [ 663.801875] env[69475]: _type = "Task" [ 663.801875] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.812376] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507681, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.875154] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9d95e6b0-01bd-4cec-b3de-05c3d9b2eec9 tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.370s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 664.063767] env[69475]: DEBUG nova.network.neutron [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.113286] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44c63e41-eb7a-482d-906b-d3446dd899a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.123535] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8caa8a-ba98-4ef2-9f06-1a1a96fcc1d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.129571] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Releasing lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.130052] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.130207] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing instance network info cache due to event network-changed-4bac6b08-29dc-45f8-bd32-4adb28c6ea48. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 664.130729] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquiring lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.130966] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquired lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.131349] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Refreshing network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 664.164142] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde8af43-94b6-44eb-acf0-7fef6abcc6fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.173557] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea1d62b-0e80-4194-8031-c3be3e8a4615 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.190425] env[69475]: DEBUG nova.compute.provider_tree [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.224140] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Successfully created port: 32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 664.259795] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522e9fa9-3b4e-73fb-1218-b3fdc4bd6fa0, 'name': SearchDatastore_Task, 'duration_secs': 0.011271} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.260816] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.260816] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.260816] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.260816] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.261193] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 664.261469] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b7e7abf-ede6-4d31-8181-25f81aedf706 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.281038] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 664.281260] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 664.282137] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87f6e1e-7801-4b0d-82d0-abf61a96f87b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.290166] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 664.290166] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52290134-be86-409d-d7c6-33eee48810f1" [ 664.290166] env[69475]: _type = "Task" [ 664.290166] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.302886] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52290134-be86-409d-d7c6-33eee48810f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.313047] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507681, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.377717] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 664.397497] env[69475]: DEBUG nova.compute.manager [req-17e7a226-eddc-4ced-9d34-18570b4c256f req-4d5e825a-ee95-4d69-a6f4-f05a6d2af0b8 service nova] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Received event network-vif-deleted-70facb17-ed67-480e-b18e-81f937380d97 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 664.525093] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 664.555222] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 664.555543] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 664.555792] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 664.556042] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 664.556232] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 664.556427] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 664.557068] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 664.557068] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 
tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 664.557201] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 664.557344] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 664.557541] env[69475]: DEBUG nova.virt.hardware [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 664.558511] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ec3846-c132-42ae-ac1b-fad6435a7ae6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.567492] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53a3965b-6f54-490b-ac06-752f46287a46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.572618] env[69475]: INFO nova.compute.manager [-] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Took 1.37 seconds to deallocate network for instance. [ 664.693573] env[69475]: DEBUG nova.scheduler.client.report [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.802143] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52290134-be86-409d-d7c6-33eee48810f1, 'name': SearchDatastore_Task, 'duration_secs': 0.01366} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.806286] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ce1390b-a767-452c-951b-4446e0ce9edc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.817704] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 664.817704] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f0c651-cf1b-1159-f28f-ab7a3fc157b2" [ 664.817704] env[69475]: _type = "Task" [ 664.817704] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.821205] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507681, 'name': PowerOnVM_Task, 'duration_secs': 0.57972} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.825352] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 664.827967] env[69475]: INFO nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Took 10.02 seconds to spawn the instance on the hypervisor. [ 664.827967] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 664.827967] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e5d93f-955a-472e-b86a-14bc1ad0b51c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.837805] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f0c651-cf1b-1159-f28f-ab7a3fc157b2, 'name': SearchDatastore_Task, 'duration_secs': 0.011567} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.839669] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.841438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c078753c-48a6-490b-8d7d-b0832eced25e/c078753c-48a6-490b-8d7d-b0832eced25e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.843816] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8d0a94a-c437-48e6-92cc-ec4d7e118013 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.853186] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 664.853186] env[69475]: value = "task-3507683" [ 664.853186] env[69475]: _type = "Task" [ 664.853186] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.863358] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.913502] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 664.974997] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updated VIF entry in instance network info cache for port 4bac6b08-29dc-45f8-bd32-4adb28c6ea48. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 664.974997] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updating instance_info_cache with network_info: [{"id": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "address": "fa:16:3e:50:f7:f7", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4bac6b08-29", "ovs_interfaceid": "4bac6b08-29dc-45f8-bd32-4adb28c6ea48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.088627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.202319] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.706s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.202319] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.203359] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.336s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.205080] env[69475]: INFO nova.compute.claims [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.363109] env[69475]: INFO nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Took 32.69 seconds to build instance. [ 665.367847] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.484798} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.368158] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c078753c-48a6-490b-8d7d-b0832eced25e/c078753c-48a6-490b-8d7d-b0832eced25e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.368843] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.369158] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59ac9a3b-210d-4ada-ab7b-07e23fd4b415 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.377570] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 665.377570] env[69475]: value = "task-3507684" [ 665.377570] env[69475]: _type = "Task" [ 665.377570] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.389901] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507684, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.478429] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Releasing lock "refresh_cache-d1a316d5-59ef-4286-9d7e-a444ffadc49d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.478709] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Received event network-vif-plugged-a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.478905] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquiring lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.479128] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.479299] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.479463] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] No waiting events found dispatching network-vif-plugged-a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 665.479632] env[69475]: WARNING nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Received unexpected event network-vif-plugged-a9de04f5-6001-4dc3-a305-3afbdf2429c6 for instance with vm_state building and task_state spawning. [ 665.479794] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Received event network-changed-a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 665.479949] env[69475]: DEBUG nova.compute.manager [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Refreshing instance network info cache due to event network-changed-a9de04f5-6001-4dc3-a305-3afbdf2429c6. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 665.480145] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquiring lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.480324] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Acquired lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.480465] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Refreshing network info cache for port a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 665.495415] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 665.495818] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.712294] env[69475]: DEBUG nova.compute.utils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.717162] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 665.868641] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.076s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.892303] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06747} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.892303] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 665.893063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8085017-6d49-4a52-af70-e2912d28135c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.926962] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] c078753c-48a6-490b-8d7d-b0832eced25e/c078753c-48a6-490b-8d7d-b0832eced25e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 665.927362] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b6ba136-c628-4628-a642-21303f4c7ea7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.986234] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 665.986234] env[69475]: value = "task-3507686" [ 665.986234] env[69475]: _type = "Task" [ 665.986234] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.986234] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507686, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.003062] env[69475]: DEBUG nova.compute.utils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 666.217864] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.372396] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 666.384500] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Updated VIF entry in instance network info cache for port a9de04f5-6001-4dc3-a305-3afbdf2429c6. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 666.385716] env[69475]: DEBUG nova.network.neutron [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Updating instance_info_cache with network_info: [{"id": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "address": "fa:16:3e:ab:79:eb", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9de04f5-60", "ovs_interfaceid": "a9de04f5-6001-4dc3-a305-3afbdf2429c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.402768] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Successfully updated port: 32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 666.462580] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507686, 'name': ReconfigVM_Task, 'duration_secs': 0.29106} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.465527] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Reconfigured VM instance instance-00000017 to attach disk [datastore1] c078753c-48a6-490b-8d7d-b0832eced25e/c078753c-48a6-490b-8d7d-b0832eced25e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 666.466518] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70a3ced2-a14f-4682-9fb5-3c501ea7f67e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.473048] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 666.473048] env[69475]: value = "task-3507687" [ 666.473048] env[69475]: _type = "Task" [ 666.473048] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.481968] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507687, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.509147] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.752204] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8eaaab4-68e6-4813-891a-19ef93b9251d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.761794] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb66795d-8852-4b8c-805b-8b4c5c73746b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.802290] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24949f82-5048-4915-93e6-09cf55b23b81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.805756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.805756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba 
tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.805756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.805756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 666.805923] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 666.808993] env[69475]: INFO nova.compute.manager [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Terminating instance [ 666.814050] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ff169e4-7af1-48e3-aa14-d2a066d76088 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.829992] env[69475]: DEBUG nova.compute.provider_tree [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.889670] env[69475]: DEBUG oslo_concurrency.lockutils [req-42d5d629-b680-4dc2-bd2f-79ff5b306cc7 req-b65a6cfc-539e-44b9-a193-86ada72ee924 service nova] Releasing lock "refresh_cache-c078753c-48a6-490b-8d7d-b0832eced25e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.894177] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 666.908318] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.908318] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquired lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 666.908318] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 666.984152] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507687, 'name': Rename_Task, 'duration_secs': 0.156515} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.985322] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 666.985322] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf5efdb0-11a6-4b53-8d46-fc20a5434d54 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.991792] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 666.991792] env[69475]: value = "task-3507688" [ 666.991792] env[69475]: _type = "Task" [ 666.991792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.003040] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507688, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.101716] env[69475]: DEBUG nova.compute.manager [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received event network-vif-plugged-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.103469] env[69475]: DEBUG oslo_concurrency.lockutils [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] Acquiring lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.103932] env[69475]: DEBUG oslo_concurrency.lockutils [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.104261] env[69475]: DEBUG oslo_concurrency.lockutils [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.104640] env[69475]: DEBUG nova.compute.manager [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] No waiting events found dispatching network-vif-plugged-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 667.105652] env[69475]: WARNING nova.compute.manager [req-13e7617e-0ec3-4d55-a756-4fe25f2dbf95 req-f55261ad-f7f1-47e0-9cfc-fcba0b7fb9c2 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received unexpected event network-vif-plugged-32a4878e-f7f5-490d-a877-d01cb7eaa6dc for instance with vm_state building and task_state spawning. [ 667.236416] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.266036] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 667.266036] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 667.266036] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 667.266036] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 667.266269] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 667.267632] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 667.267632] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 667.267632] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 667.267632] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a 
tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 667.267788] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 667.267920] env[69475]: DEBUG nova.virt.hardware [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 667.272023] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b2da144-3641-4210-beb3-32ff18f26b83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.284526] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2c6266-7d06-47e2-9764-cd7d103ebbb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.306027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 667.312739] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Creating folder: Project (80864cf1d33947a3ae6447976ca25644). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.313171] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37489031-42e9-4dd9-a780-5af3940b055a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.320114] env[69475]: DEBUG nova.compute.manager [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 667.320289] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 667.321185] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a9a413-0024-4617-adfd-0cd1cfdc4418 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.325763] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Created folder: Project (80864cf1d33947a3ae6447976ca25644) in parent group-v700823. [ 667.325966] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Creating folder: Instances. Parent ref: group-v700891. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 667.326647] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8018861e-caa4-4038-88d1-2ff3d9d6cb30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.331083] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 667.331304] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fdfcb38-6f6a-4052-a0ab-4d5763431764 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.333444] env[69475]: DEBUG nova.scheduler.client.report [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.341142] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Created folder: Instances in parent group-v700891. [ 667.341142] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 667.341142] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 667.341142] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94d41907-5c7d-4565-acf9-d26ed136d02d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.361212] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 667.361212] env[69475]: value = "task-3507691" [ 667.361212] env[69475]: _type = "Task" [ 667.361212] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.367669] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 667.367669] env[69475]: value = "task-3507692" [ 667.367669] env[69475]: _type = "Task" [ 667.367669] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.375292] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507691, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.381706] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507692, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.487762] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 667.504501] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507688, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.634604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.635045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.635132] env[69475]: INFO nova.compute.manager [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Attaching volume abac38e1-aef6-4f0f-88d9-c61aede17432 to /dev/sdb [ 667.688612] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7507ee85-c0fd-4077-bf6c-5b6af88c07d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.695739] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78416c8-4f32-4c02-b332-f5ddc9e39875 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.710209] env[69475]: DEBUG nova.virt.block_device [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating existing volume attachment record: c45eb2a4-c437-4a95-a22f-0b27ccaa545e {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 667.756132] env[69475]: DEBUG nova.network.neutron [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [{"id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "address": "fa:16:3e:c3:b0:ae", "network": {"id": "de4f7b85-fb16-4097-91e9-9f3cf05371be", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-742523535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c112f0c0629d4bf9a01e59342b38da87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tap32a4878e-f7", "ovs_interfaceid": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.860675] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.862743] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.864337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.888s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.864570] env[69475]: DEBUG nova.objects.instance [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 667.891891] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507691, 'name': PowerOffVM_Task, 'duration_secs': 0.220148} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.892531] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507692, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.892939] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 667.893256] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 667.893912] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-238e5709-cebc-4350-b6f2-e91f6fc8e68a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.971844] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.972194] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 667.978456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 667.978456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 667.978456] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Deleting the datastore file [datastore2] 77a5665d-b00f-42c2-a1e8-319dfd232b06 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 667.978456] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-324ec16a-1bb3-4f84-8b5c-ce412989ca36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.985169] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for the task: (returnval){ [ 667.985169] env[69475]: value = "task-3507696" [ 667.985169] env[69475]: _type = "Task" [ 667.985169] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.994179] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507696, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.003105] env[69475]: DEBUG oslo_vmware.api [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507688, 'name': PowerOnVM_Task, 'duration_secs': 0.532268} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.003377] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 668.003574] env[69475]: INFO nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Took 8.32 seconds to spawn the instance on the hypervisor. [ 668.003751] env[69475]: DEBUG nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 668.004589] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304f0173-0655-4a4f-ab8d-4aea88119e96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.260263] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Releasing lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.261307] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Instance network_info: |[{"id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "address": "fa:16:3e:c3:b0:ae", "network": {"id": "de4f7b85-fb16-4097-91e9-9f3cf05371be", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-742523535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "c112f0c0629d4bf9a01e59342b38da87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": 
"ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32a4878e-f7", "ovs_interfaceid": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 668.261495] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:b0:ae', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '23f4655e-3495-421d-be4e-f6002a85a47a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32a4878e-f7f5-490d-a877-d01cb7eaa6dc', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 668.272019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Creating folder: Project (c112f0c0629d4bf9a01e59342b38da87). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.272819] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d085ab04-7ec5-4e07-88de-ba3ceff84e1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.284470] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Created folder: Project (c112f0c0629d4bf9a01e59342b38da87) in parent group-v700823. [ 668.284715] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Creating folder: Instances. Parent ref: group-v700897. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 668.285016] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-357dc116-264f-4d3a-b06a-908b6b1863c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.294221] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Created folder: Instances in parent group-v700897. [ 668.294476] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.294694] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 668.294896] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99c33551-bb6b-4d9f-a81d-bf6ec9ebe90d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.315807] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 668.315807] env[69475]: value = "task-3507701" [ 668.315807] env[69475]: _type = "Task" [ 668.315807] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.323052] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507701, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.369695] env[69475]: DEBUG nova.compute.utils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.373130] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 668.391065] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507692, 'name': CreateVM_Task, 'duration_secs': 0.531812} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.391669] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.392307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.392579] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.392754] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.393262] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c849950-4958-47a8-9ebc-dd9a0088ee99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.400510] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 668.400510] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525444f1-6075-4aff-f3ed-bcc0a316ca85" [ 668.400510] env[69475]: _type = "Task" [ 668.400510] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.409923] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525444f1-6075-4aff-f3ed-bcc0a316ca85, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.482887] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483086] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483245] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483590] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483590] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483691] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.483829] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 668.483927] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 668.495943] env[69475]: DEBUG oslo_vmware.api [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Task: {'id': task-3507696, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.224323} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.496844] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 668.498011] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 668.498011] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.498011] env[69475]: INFO nova.compute.manager [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Took 1.18 seconds to destroy the instance on the hypervisor. [ 668.498011] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 668.498011] env[69475]: DEBUG nova.compute.manager [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 668.498195] env[69475]: DEBUG nova.network.neutron [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 668.524758] env[69475]: INFO nova.compute.manager [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Took 33.28 seconds to build instance. 
[ 668.695208] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "3e332e28-5db5-4f04-8a47-95406da16e21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.695391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.739739] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "712e93b6-e797-4b9f-b39b-33373cede403" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.739991] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.781901] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.782161] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.828840] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507701, 'name': CreateVM_Task, 'duration_secs': 0.30667} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.829244] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 668.830014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.830185] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.830523] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 668.830766] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd6c2538-9b5e-43ae-9df7-d38a7224df93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.839310] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 668.839310] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52700bea-255c-fa62-5921-e06f9b82b15a" [ 668.839310] env[69475]: _type = "Task" [ 668.839310] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.852404] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52700bea-255c-fa62-5921-e06f9b82b15a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.875363] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.889100] env[69475]: DEBUG oslo_concurrency.lockutils [None req-daf4cfc7-4ad8-41f9-bbd0-a1f295db3c47 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.023s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 668.889100] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.730s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 668.889100] env[69475]: DEBUG nova.objects.instance [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lazy-loading 'resources' on Instance uuid 91d5b0db-63a5-4290-af9b-264a5ce4cd95 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 668.911048] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525444f1-6075-4aff-f3ed-bcc0a316ca85, 'name': SearchDatastore_Task, 'duration_secs': 0.011772} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.911344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 668.912035] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 668.912035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.912035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.912184] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 668.912351] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-181dc7b4-ce7a-44a7-9d7d-7e01771be658 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.924961] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 668.925178] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 668.926564] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6122afe8-7027-471f-98aa-56544bc1dc4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.934727] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 668.934727] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522d87bb-6c6d-0677-f1bd-a3e9c573f499" [ 668.934727] env[69475]: _type = "Task" [ 668.934727] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.946239] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522d87bb-6c6d-0677-f1bd-a3e9c573f499, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.986650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.027227] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb6089ef-bb1f-4778-91c8-2772f839ed8d tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "c078753c-48a6-490b-8d7d-b0832eced25e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.201s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 669.322907] env[69475]: DEBUG nova.compute.manager [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.323816] env[69475]: DEBUG nova.compute.manager [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing instance network info cache due to event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.323816] env[69475]: DEBUG oslo_concurrency.lockutils [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] Acquiring lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.323816] env[69475]: DEBUG oslo_concurrency.lockutils [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] Acquired lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.323816] env[69475]: DEBUG nova.network.neutron [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.329912] env[69475]: DEBUG nova.network.neutron [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.352727] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52700bea-255c-fa62-5921-e06f9b82b15a, 'name': SearchDatastore_Task, 'duration_secs': 0.014379} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.353125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.353200] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 669.353420] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.353585] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.353728] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 669.354242] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9adde774-af45-4be5-8004-4790eb87424b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.369549] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 669.369549] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 669.369946] env[69475]: DEBUG nova.virt.vmwareapi.ds_util 
[None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 669.369946] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 669.370742] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42c7cf2e-3b72-4ff8-8983-43ec1028c582 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.384377] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 669.384377] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab9d97-1b9a-3b25-81aa-9709075893b3" [ 669.384377] env[69475]: _type = "Task" [ 669.384377] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.396647] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab9d97-1b9a-3b25-81aa-9709075893b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.447509] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522d87bb-6c6d-0677-f1bd-a3e9c573f499, 'name': SearchDatastore_Task, 'duration_secs': 0.014634} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.450570] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c705f47-c139-4c8d-a55d-a84629ad51bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.456897] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 669.456897] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5d5d0-7875-5ece-718d-8d681ea766d7" [ 669.456897] env[69475]: _type = "Task" [ 669.456897] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.469319] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5d5d0-7875-5ece-718d-8d681ea766d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.531530] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 669.836127] env[69475]: INFO nova.compute.manager [-] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Took 1.34 seconds to deallocate network for instance. [ 669.884326] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.890291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7312afe-57e5-4578-a21f-40238e625030 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.906028] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbf5fa3d-1232-418a-88c8-28d970f993df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.908292] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab9d97-1b9a-3b25-81aa-9709075893b3, 'name': SearchDatastore_Task, 'duration_secs': 0.019404} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.914159] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a81f17d-418d-464c-9958-ebef32048759 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.949324] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 669.949585] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 669.949739] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 669.949912] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 669.950068] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 669.950240] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 669.950419] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 669.950567] env[69475]: DEBUG nova.virt.hardware [None 
req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 669.950732] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 669.950900] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 669.951128] env[69475]: DEBUG nova.virt.hardware [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 669.952373] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c6a1f1-c2af-4c23-80e8-8cd19f011bdd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.955521] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef063c2a-2ba0-464c-a4cb-a0c331c9636f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.959463] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 669.959463] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ef2e2-ca4f-e4fc-a523-665cdd4152ef" [ 669.959463] env[69475]: _type = "Task" [ 669.959463] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.971161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0141501c-7b72-4e0a-aaba-6a0381f5d732 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.980199] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaea94f-b3bb-4f53-bb46-e90d3bf65590 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.983557] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5d5d0-7875-5ece-718d-8d681ea766d7, 'name': SearchDatastore_Task, 'duration_secs': 0.02762} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.987980] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.988034] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 669.992056] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ef2e2-ca4f-e4fc-a523-665cdd4152ef, 'name': SearchDatastore_Task, 'duration_secs': 0.018363} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.992056] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b03c135-7b48-4d74-8b44-8a2ae41a1394 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.992056] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.992056] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 8f65d893-d2e2-452f-8870-f72ec036f16a/8f65d893-d2e2-452f-8870-f72ec036f16a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 670.006297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "c078753c-48a6-490b-8d7d-b0832eced25e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.006537] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock 
"c078753c-48a6-490b-8d7d-b0832eced25e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.006737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.007183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.007183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "c078753c-48a6-490b-8d7d-b0832eced25e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.008648] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3610146c-08b6-4dd0-ad4b-bee6dc55cbe9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.010807] env[69475]: DEBUG nova.compute.provider_tree [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.012249] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 670.017873] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Creating folder: Project (de2cf075b6474570b2a2e3ff4c07de5e). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.019346] env[69475]: DEBUG nova.scheduler.client.report [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 670.022604] env[69475]: INFO nova.compute.manager [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Terminating instance [ 670.024360] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2cba6e19-3cc5-481c-b6ed-ff2681bb4779 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.030816] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 670.030816] env[69475]: value = "task-3507702" [ 670.030816] env[69475]: _type = "Task" [ 670.030816] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.036016] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 670.036016] env[69475]: value = "task-3507704" [ 670.036016] env[69475]: _type = "Task" [ 670.036016] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.045633] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Created folder: Project (de2cf075b6474570b2a2e3ff4c07de5e) in parent group-v700823. [ 670.045633] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Creating folder: Instances. Parent ref: group-v700900. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 670.050440] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1392af17-fe0b-451e-beed-db464e0aeb29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.052205] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507702, 'name': CopyVirtualDisk_Task} progress is 0%. 
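
Editor's note: the inventory payload logged just above is what placement schedules against; the usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single instance. A short worked sketch using the exact figures from this provider (field names follow the logged dict):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        schedulable = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {schedulable:g} schedulable")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
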
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.059478] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507704, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.066959] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.069142] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Created folder: Instances in parent group-v700900. [ 670.069555] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 670.069876] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 670.070186] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3fd6835-9fd0-483d-8685-d2fc0a1bea92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.088233] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 670.088233] env[69475]: value = "task-3507707" [ 670.088233] env[69475]: _type = "Task" [ 670.088233] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.098180] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507707, 'name': CreateVM_Task} progress is 0%. 
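
Editor's note: before CreateVM_Task runs, the driver ensures a per-project folder ("Project (<project id>)") and an "Instances" child folder exist under the compute folder, which is what the two Folder.CreateFolder calls above do. A toy sketch of that lookup-or-create step; the in-memory dict stands in for the vCenter inventory and is purely illustrative.

    def ensure_folder(tree, parent, name):
        """Return the child folder `name` under `parent`, creating it only if missing."""
        children = tree.setdefault(parent, {})
        return children.setdefault(name, f"{parent}/{name}")

    if __name__ == "__main__":
        inventory = {}
        project = ensure_folder(inventory, "group-v700823",
                                "Project (de2cf075b6474570b2a2e3ff4c07de5e)")
        instances = ensure_folder(inventory, project, "Instances")
        print(instances)   # the folder the new VM is registered under
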
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.165668] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.165976] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.166300] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.166593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.166806] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.169525] env[69475]: INFO nova.compute.manager [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Terminating instance [ 670.204022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "c3db35f4-f43d-464c-9556-18a90866ee6a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.204297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.204549] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.204814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.205019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.209316] env[69475]: INFO nova.compute.manager [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Terminating instance [ 670.350449] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.527140] env[69475]: DEBUG nova.network.neutron [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updated VIF entry in instance network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.527140] env[69475]: DEBUG nova.network.neutron [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [{"id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "address": "fa:16:3e:c3:b0:ae", "network": {"id": "de4f7b85-fb16-4097-91e9-9f3cf05371be", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-742523535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c112f0c0629d4bf9a01e59342b38da87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32a4878e-f7", "ovs_interfaceid": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.528130] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.639s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.531851] env[69475]: DEBUG nova.compute.manager [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Start destroying the instance on the hypervisor. 
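
Editor's note: the refreshed instance_info_cache entry above is a list of VIF dicts, each carrying the Neutron port id, MAC address and the subnets/IPs under network.subnets. A small sketch that pulls the fixed IPs out of an entry shaped like the one logged; only the keys visible above are assumed.

    def fixed_ips(network_info):
        """Yield (port_id, mac, ip) for every fixed IP in a cached network_info list."""
        for vif in network_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip.get("type") == "fixed":
                        yield vif["id"], vif["address"], ip["address"]

    cache_entry = [{
        "id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc",
        "address": "fa:16:3e:c3:b0:ae",
        "network": {"subnets": [{"ips": [{"address": "192.168.128.8", "type": "fixed"}]}]},
    }]
    print(list(fixed_ips(cache_entry)))
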
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 670.532804] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.533238] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.323s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.534767] env[69475]: INFO nova.compute.claims [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.538759] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8077bd-8f74-4233-bd91-5d8f9fe7effb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.558337] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507702, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.568139] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507704, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.568280] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 670.568478] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e727f4aa-a8cd-4ecf-81e7-1698d6917301 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.571294] env[69475]: INFO nova.scheduler.client.report [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted allocations for instance 91d5b0db-63a5-4290-af9b-264a5ce4cd95 [ 670.589289] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 670.589289] env[69475]: value = "task-3507709" [ 670.589289] env[69475]: _type = "Task" [ 670.589289] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.616098] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507707, 'name': CreateVM_Task, 'duration_secs': 0.394512} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.617167] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507709, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.617513] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 670.618140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.618469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.618934] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 670.619384] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9d34342-c852-4245-bb7c-3d1e82488afb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.627676] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 670.627676] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee99ee-5171-f3c9-b349-39be13007bee" [ 670.627676] env[69475]: _type = "Task" [ 670.627676] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.639018] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee99ee-5171-f3c9-b349-39be13007bee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.674246] env[69475]: DEBUG nova.compute.manager [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 670.674454] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.675634] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fb3484-29ba-4e79-b0b1-8fbfb647acd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.688549] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 670.689107] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0109386a-1894-43fb-b4cb-8e8fff2ec8c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.700868] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 670.700868] env[69475]: value = "task-3507710" [ 670.700868] env[69475]: _type = "Task" [ 670.700868] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.714779] env[69475]: DEBUG nova.compute.manager [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 670.716042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 670.716042] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507710, 'name': PowerOffVM_Task} progress is 0%. 
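
Editor's note: the interleaved terminate entries all follow the same order on the hypervisor: power the VM off, unregister it, then delete its directory from the datastore, and only afterwards deallocate the network. A linear sketch of that sequence; the vm_ops object and its method names are placeholders for the driver calls seen in the log, not a real interface.

    def destroy_on_hypervisor(vm_ops, instance_uuid, datastore):
        """Tear down a VM in the order the log shows: off -> unregister -> delete files."""
        vm_ops.power_off(instance_uuid)                                  # PowerOffVM_Task
        vm_ops.unregister(instance_uuid)                                 # UnregisterVM
        vm_ops.delete_datastore_dir(f"[{datastore}] {instance_uuid}")    # DeleteDatastoreFile_Task
        # Only after the files are gone does the compute manager deallocate the ports.
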
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.716685] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012389ca-4813-4a1e-bde6-856011acbbb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.727315] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 670.727789] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19c46e5a-1bc6-48b6-869e-c60c23cc0723 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.826900] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 670.827332] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 670.827735] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleting the datastore file [datastore1] c3db35f4-f43d-464c-9556-18a90866ee6a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 670.828364] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d786f0fa-7d6b-49f1-9ecf-0bd4e297e609 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.837319] env[69475]: DEBUG oslo_vmware.api [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 670.837319] env[69475]: value = "task-3507712" [ 670.837319] env[69475]: _type = "Task" [ 670.837319] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.848574] env[69475]: DEBUG oslo_vmware.api [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507712, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.027995] env[69475]: DEBUG oslo_concurrency.lockutils [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] Releasing lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.028332] env[69475]: DEBUG nova.compute.manager [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Received event network-vif-deleted-4c4728a8-f4a8-44f7-9492-8e43fbf061ae {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.028567] env[69475]: INFO nova.compute.manager [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Neutron deleted interface 4c4728a8-f4a8-44f7-9492-8e43fbf061ae; detaching it from the instance and deleting it from the info cache [ 671.028771] env[69475]: DEBUG nova.network.neutron [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.057047] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.625115} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.061725] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.061844] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.062461] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507704, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.697829} completed successfully. 
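
Editor's note: the "network-vif-deleted" events above arrive from Neutron after a port is removed; the handler drops the matching VIF from the cached network_info (leaving [] here) and tolerates the instance already being gone, which is why the later "Detach interface failed ... could not be found" entry is informational rather than an error. A stripped-down handler with that shape; the cache and instance registry are illustrative dicts.

    def handle_network_vif_deleted(info_cache, instances, instance_uuid, port_id):
        """Remove port_id from the cached network_info; ignore missing instances."""
        if instance_uuid not in instances:
            # Mirrors "Detach interface failed ... could not be found": nothing to do.
            return
        cached = info_cache.get(instance_uuid, [])
        info_cache[instance_uuid] = [vif for vif in cached if vif["id"] != port_id]
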
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.062675] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a977c15-76f8-4f80-866a-09dc128559a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.064661] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 8f65d893-d2e2-452f-8870-f72ec036f16a/8f65d893-d2e2-452f-8870-f72ec036f16a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 671.064868] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 671.065609] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-79fbdc0e-9a95-44d5-badd-0057d05bb515 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.076038] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 671.076038] env[69475]: value = "task-3507713" [ 671.076038] env[69475]: _type = "Task" [ 671.076038] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.077408] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 671.077408] env[69475]: value = "task-3507714" [ 671.077408] env[69475]: _type = "Task" [ 671.077408] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.088349] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60337a24-8a47-4aa9-8957-41237be1c2cc tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "91d5b0db-63a5-4290-af9b-264a5ce4cd95" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.731s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.102688] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507713, 'name': ExtendVirtualDisk_Task} progress is 0%. 
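
Editor's note: "Extending root virtual disk to 1048576" appears to be expressed in KiB; the copied base image is grown to the flavor's root disk size, and 1 GiB is 1024 * 1024 = 1,048,576 KiB, which matches the figure logged for these instances. A one-line helper for that conversion:

    def root_gb_to_kb(root_gb):
        """Flavor root disk size in GiB -> the KiB value passed to ExtendVirtualDisk."""
        return root_gb * 1024 * 1024

    assert root_gb_to_kb(1) == 1048576   # the value logged above
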
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.102688] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.110996] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507709, 'name': PowerOffVM_Task, 'duration_secs': 0.278504} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.111280] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 671.111488] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 671.111741] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afb23d26-1803-44d4-929b-11db92d338c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.141575] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee99ee-5171-f3c9-b349-39be13007bee, 'name': SearchDatastore_Task, 'duration_secs': 0.021759} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.141575] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.141782] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 671.142066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.142387] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.142452] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 671.142676] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f86e890-f5df-4fe0-8a66-9f4d4cf85db3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.156047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 671.156294] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Folder [datastore2] devstack-image-cache_base created. 
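
Editor's note: every file operation in this log addresses content with the bracketed datastore form "[datastore2] relative/path", as in the MakeDirectory and folder-created entries above. A tiny standalone parser for that form; it is a convenience helper, not the driver's own datastore-path class.

    def split_datastore_path(ds_path):
        """'[datastore2] a/b.vmdk' -> ('datastore2', 'a/b.vmdk')."""
        name, _, rel = ds_path.partition("] ")
        return name.lstrip("["), rel

    print(split_datastore_path("[datastore2] devstack-image-cache_base/"
                               "afa9d32c-9f39-44fb-bf3b-50d35842a59f/"
                               "afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk"))
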
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 671.157094] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aeafccd7-b777-4214-9741-09cd8eb41f35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.163538] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 671.163538] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076fc0-385f-418d-fad0-4acc41f39687" [ 671.163538] env[69475]: _type = "Task" [ 671.163538] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.173719] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076fc0-385f-418d-fad0-4acc41f39687, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.212168] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507710, 'name': PowerOffVM_Task, 'duration_secs': 0.25422} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.212514] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 671.212717] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 671.213011] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-192d002f-21d8-4785-8298-173d7ccf7a2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.238828] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 671.239079] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 671.239282] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleting the datastore file [datastore1] c078753c-48a6-490b-8d7d-b0832eced25e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 671.239578] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95f749d2-3a87-4c62-ad52-b2a236258e71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.248387] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 671.248387] env[69475]: value = "task-3507717" [ 671.248387] env[69475]: _type = "Task" [ 671.248387] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.258189] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.295773] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 671.296151] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 671.296383] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleting the datastore file [datastore2] 3eda17da-111c-412d-9af4-d3a40b7d8faa {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 671.296455] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7e2ae599-e634-4b0e-8116-86d851f3a115 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.303171] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 671.303171] env[69475]: value = "task-3507718" [ 671.303171] env[69475]: _type = "Task" [ 671.303171] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.312708] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507718, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.347829] env[69475]: DEBUG oslo_vmware.api [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.195633} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.348267] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 671.348482] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 671.348672] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 671.348864] env[69475]: INFO nova.compute.manager [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Took 0.63 seconds to destroy the instance on the hypervisor. [ 671.349127] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
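
Editor's note: "_deallocate_network_with_retries" above is driven by a looping call so that a transient Neutron failure does not leak ports when an instance is destroyed. A stdlib retry loop in the same spirit; the attempt count and delay are illustrative, not Nova's configured values.

    import time

    def deallocate_with_retries(deallocate, attempts=3, delay=2.0):
        """Call deallocate() until it succeeds or the attempts are exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                return deallocate()
            except Exception as exc:   # broad on purpose: illustrative only
                if attempt == attempts:
                    raise
                print(f"deallocate failed ({exc}); retrying in {delay}s")
                time.sleep(delay)
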
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 671.349386] env[69475]: DEBUG nova.compute.manager [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 671.349563] env[69475]: DEBUG nova.network.neutron [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 671.532611] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-96da4a88-8bea-40b8-9b09-2bf2809be859 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.543901] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6da0d6-a944-4ba1-9b7f-1177e2cd0a0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.595352] env[69475]: DEBUG nova.compute.manager [req-c361e2f1-b64f-4b80-8c9e-2050da80fe88 req-50b06c53-8367-4173-b919-83a102ae689c service nova] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Detach interface failed, port_id=4c4728a8-f4a8-44f7-9492-8e43fbf061ae, reason: Instance 77a5665d-b00f-42c2-a1e8-319dfd232b06 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 672.335490] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 672.335744] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700896', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'name': 'volume-abac38e1-aef6-4f0f-88d9-c61aede17432', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9cfd8425-c1aa-4dbc-afa4-3a5aa10428de', 'attached_at': '', 'detached_at': '', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'serial': 'abac38e1-aef6-4f0f-88d9-c61aede17432'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 672.340043] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd3db54-6817-49cf-bc7f-b54b5877c441 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.343739] env[69475]: DEBUG nova.compute.manager [req-5f05ca35-f5c3-421d-a598-7a5ae86478bc req-edc6c774-f65e-4363-ae25-dd2b767115a0 service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Received event network-vif-deleted-00f2c4f8-7075-42f6-94f1-c09274ea941d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 672.344113] env[69475]: INFO nova.compute.manager [req-5f05ca35-f5c3-421d-a598-7a5ae86478bc req-edc6c774-f65e-4363-ae25-dd2b767115a0 service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Neutron deleted interface 00f2c4f8-7075-42f6-94f1-c09274ea941d; detaching it from the instance and deleting it from the info cache [ 672.344326] env[69475]: DEBUG nova.network.neutron [req-5f05ca35-f5c3-421d-a598-7a5ae86478bc req-edc6c774-f65e-4363-ae25-dd2b767115a0 service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.364794] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507713, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080356} completed successfully. 
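
Editor's note: the volume-attach entry above prints the connection_info handed to _attach_volume_vmdk: driver_volume_type 'vmdk' plus a data dict carrying the backing volume moref, the Cinder volume id and the access mode. A small extractor for that shape; only the keys visible in the log are assumed.

    def parse_vmdk_connection_info(connection_info):
        """Return (volume_ref, volume_id, read_only) from a 'vmdk' connection_info dict."""
        if connection_info["driver_volume_type"] != "vmdk":
            raise ValueError("not a vSphere-backed volume")
        data = connection_info["data"]
        return data["volume"], data["volume_id"], data.get("access_mode") == "ro"

    info = {"driver_volume_type": "vmdk",
            "data": {"volume": "vm-700896",
                     "volume_id": "abac38e1-aef6-4f0f-88d9-c61aede17432",
                     "access_mode": "rw"}}
    print(parse_vmdk_connection_info(info))
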
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.365267] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ff2141a-1bd9-4ceb-ba22-a65d02659979 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.382774] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.383727] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.131953} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.392877] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c98a46-9702-401e-8e4c-e9cde0d44401 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.395701] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 672.396747] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b083ae47-0dfc-4486-ad1b-40de7a7f76ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.399736] env[69475]: DEBUG oslo_vmware.api [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251332} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.400223] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076fc0-385f-418d-fad0-4acc41f39687, 'name': SearchDatastore_Task, 'duration_secs': 0.029859} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.403501] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1add350c-d359-4efd-8caa-a46dbb34184a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.405897] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.406218] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 672.406257] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.406409] env[69475]: INFO nova.compute.manager [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Took 1.87 seconds to destroy the instance on the hypervisor. [ 672.406639] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.411020] env[69475]: DEBUG nova.compute.manager [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 672.411020] env[69475]: DEBUG nova.network.neutron [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.411020] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45871aeb-f404-414c-816d-e0b82c0cafc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.414809] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6454c0be-085b-4654-90e6-e1eda378b6b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.453263] env[69475]: DEBUG oslo_vmware.api [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3507718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22784} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.463027] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] volume-abac38e1-aef6-4f0f-88d9-c61aede17432/volume-abac38e1-aef6-4f0f-88d9-c61aede17432.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.474306] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.485209] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 672.485462] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 672.485679] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 672.485911] env[69475]: INFO nova.compute.manager [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Took 1.81 seconds to destroy the instance on the hypervisor. [ 672.486175] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.486373] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c752abc-1c6b-457b-b754-46f68acd6a90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.500278] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a4083ec-514c-49be-81c4-abffd81291b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.522867] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 8f65d893-d2e2-452f-8870-f72ec036f16a/8f65d893-d2e2-452f-8870-f72ec036f16a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 672.537169] env[69475]: DEBUG nova.compute.manager [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 672.537285] env[69475]: DEBUG nova.network.neutron [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 672.538973] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-094ce621-8ceb-4b66-bc91-3735a559eb82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.553735] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 672.553735] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5299dd6e-c336-2bb5-3a68-998d32458400" [ 672.553735] env[69475]: _type = "Task" [ 672.553735] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.554056] env[69475]: DEBUG nova.compute.manager [req-5f05ca35-f5c3-421d-a598-7a5ae86478bc req-edc6c774-f65e-4363-ae25-dd2b767115a0 service nova] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Detach interface failed, port_id=00f2c4f8-7075-42f6-94f1-c09274ea941d, reason: Instance c3db35f4-f43d-464c-9556-18a90866ee6a could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 672.562398] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 672.562398] env[69475]: value = "task-3507721" [ 672.562398] env[69475]: _type = "Task" [ 672.562398] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.571190] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5299dd6e-c336-2bb5-3a68-998d32458400, 'name': SearchDatastore_Task, 'duration_secs': 0.011666} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.571491] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 672.571491] env[69475]: value = "task-3507722" [ 672.571491] env[69475]: _type = "Task" [ 672.571491] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.571794] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Waiting for the task: (returnval){ [ 672.571794] env[69475]: value = "task-3507720" [ 672.571794] env[69475]: _type = "Task" [ 672.571794] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.574760] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.575036] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b87cac84-ea70-428b-872e-4f6145e36b39/b87cac84-ea70-428b-872e-4f6145e36b39.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 672.575546] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2802484a-e50b-405e-885f-e01b9e660f73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.586321] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507721, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.600931] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507722, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.601329] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507720, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.603052] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 672.603052] env[69475]: value = "task-3507723" [ 672.603052] env[69475]: _type = "Task" [ 672.603052] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.615104] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "df73dd41-7455-4482-abb2-b61b26fcf403" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.615372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.620327] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507723, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.839538] env[69475]: DEBUG nova.network.neutron [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.075890] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507721, 'name': ReconfigVM_Task, 'duration_secs': 0.456897} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.083040] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.084473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f85b37ec-6d5e-40af-ba26-c5e838b74219 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.087017] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49a09cab-ee64-46df-8c1d-f13e73dc3c67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.094559] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507722, 'name': ReconfigVM_Task, 'duration_secs': 0.34905} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.102071] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 8f65d893-d2e2-452f-8870-f72ec036f16a/8f65d893-d2e2-452f-8870-f72ec036f16a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.102071] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 673.102071] env[69475]: value = "task-3507724" [ 673.102071] env[69475]: _type = "Task" [ 673.102071] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.102071] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507720, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.102483] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6ede949-25db-4b89-950e-1bad3bffdbd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.105139] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a62522-1f06-48a2-b965-104656117ac1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.159840] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43d37c9-d8ae-4c71-b108-e02d05f9f69c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.162715] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507723, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487738} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.163097] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 673.163097] env[69475]: value = "task-3507725" [ 673.163097] env[69475]: _type = "Task" [ 673.163097] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.163384] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507724, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.164170] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b87cac84-ea70-428b-872e-4f6145e36b39/b87cac84-ea70-428b-872e-4f6145e36b39.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 673.164570] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 673.165430] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ece9fe27-31c5-4038-904d-fd7cda464152 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.176587] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7447676-60af-4d8b-bfba-9315e58832a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.185496] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 673.185496] env[69475]: value = "task-3507726" [ 673.185496] env[69475]: _type = "Task" [ 673.185496] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.185755] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507725, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.199924] env[69475]: DEBUG nova.compute.provider_tree [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.204689] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507726, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.344317] env[69475]: INFO nova.compute.manager [-] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Took 1.99 seconds to deallocate network for instance. 
[ 673.587952] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507720, 'name': ReconfigVM_Task, 'duration_secs': 0.66299} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.587952] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfigured VM instance instance-00000008 to attach disk [datastore2] volume-abac38e1-aef6-4f0f-88d9-c61aede17432/volume-abac38e1-aef6-4f0f-88d9-c61aede17432.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 673.592698] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5248ed43-7cad-4d36-b3e4-e3781e032f3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.612139] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Waiting for the task: (returnval){ [ 673.612139] env[69475]: value = "task-3507727" [ 673.612139] env[69475]: _type = "Task" [ 673.612139] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.624662] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507724, 'name': Rename_Task, 'duration_secs': 0.171691} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.628536] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.629709] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507727, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.629709] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-711f6af1-dbbc-4faf-bc9f-d6b3973107e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.636666] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 673.636666] env[69475]: value = "task-3507728" [ 673.636666] env[69475]: _type = "Task" [ 673.636666] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.647074] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.665104] env[69475]: DEBUG nova.network.neutron [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.675681] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507725, 'name': Rename_Task, 'duration_secs': 0.170248} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.675967] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 673.676405] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e10d7227-b275-49e1-8a0a-528d7be1b96e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.682939] env[69475]: DEBUG nova.network.neutron [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.685061] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 673.685061] env[69475]: value = "task-3507729" [ 673.685061] env[69475]: _type = "Task" [ 673.685061] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.699911] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507726, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077495} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.703122] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.703757] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507729, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.704471] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed74e126-b8ca-48b5-8a80-e2d2469a497c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.707790] env[69475]: DEBUG nova.scheduler.client.report [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.732317] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Reconfiguring VM instance instance-0000001a to attach disk [datastore2] b87cac84-ea70-428b-872e-4f6145e36b39/b87cac84-ea70-428b-872e-4f6145e36b39.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.733306] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91d4cd36-162f-482c-b10a-87522101d1c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.755996] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 673.755996] env[69475]: value = "task-3507730" [ 673.755996] env[69475]: _type = "Task" [ 673.755996] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.765549] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507730, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.772696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 673.772696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 673.850025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.024080] env[69475]: DEBUG nova.compute.manager [req-3da76de1-94bd-4d3d-abd6-eaeb84260bb2 req-568fe9eb-d926-4379-86e9-0b5af11f972c service nova] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Received event network-vif-deleted-a9de04f5-6001-4dc3-a305-3afbdf2429c6 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.024314] env[69475]: DEBUG nova.compute.manager [req-3da76de1-94bd-4d3d-abd6-eaeb84260bb2 req-568fe9eb-d926-4379-86e9-0b5af11f972c service nova] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Received event network-vif-deleted-5fa5b65a-d1fb-4e45-8fea-68beefb4f999 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 674.125803] env[69475]: DEBUG oslo_vmware.api [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507727, 'name': ReconfigVM_Task, 'duration_secs': 0.228042} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.126121] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700896', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'name': 'volume-abac38e1-aef6-4f0f-88d9-c61aede17432', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9cfd8425-c1aa-4dbc-afa4-3a5aa10428de', 'attached_at': '', 'detached_at': '', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'serial': 'abac38e1-aef6-4f0f-88d9-c61aede17432'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 674.148730] env[69475]: DEBUG oslo_vmware.api [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507728, 'name': PowerOnVM_Task, 'duration_secs': 0.46835} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.148819] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.148962] env[69475]: INFO nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Took 6.91 seconds to spawn the instance on the hypervisor. [ 674.149151] env[69475]: DEBUG nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.149903] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf922446-464e-405b-8649-9961bf40ced9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.171214] env[69475]: INFO nova.compute.manager [-] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Took 1.76 seconds to deallocate network for instance. [ 674.184829] env[69475]: INFO nova.compute.manager [-] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Took 1.65 seconds to deallocate network for instance. [ 674.199338] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507729, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.213716] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.680s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.214609] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 674.216771] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.821s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.218175] env[69475]: INFO nova.compute.claims [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.269240] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507730, 'name': ReconfigVM_Task, 'duration_secs': 0.343044} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.269240] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Reconfigured VM instance instance-0000001a to attach disk [datastore2] b87cac84-ea70-428b-872e-4f6145e36b39/b87cac84-ea70-428b-872e-4f6145e36b39.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.269240] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-38a5aeca-f857-4a31-a177-499005461840 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.275238] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 674.275238] env[69475]: value = "task-3507731" [ 674.275238] env[69475]: _type = "Task" [ 674.275238] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.286983] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507731, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.667452] env[69475]: INFO nova.compute.manager [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Took 30.93 seconds to build instance. [ 674.677791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.699336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.699631] env[69475]: DEBUG oslo_vmware.api [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507729, 'name': PowerOnVM_Task, 'duration_secs': 0.521525} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.699869] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 674.700865] env[69475]: INFO nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Took 10.17 seconds to spawn the instance on the hypervisor. 
[ 674.700865] env[69475]: DEBUG nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 674.701915] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a606dd4-d236-4ed0-86f4-5a3b0b88e7fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.724566] env[69475]: DEBUG nova.compute.utils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 674.727739] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 674.727995] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 674.788568] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507731, 'name': Rename_Task, 'duration_secs': 0.322866} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.790030] env[69475]: DEBUG nova.policy [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 674.791548] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 674.791813] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-473e2a76-9649-4ae7-8ba5-a8c582587ad7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.801025] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 674.801025] env[69475]: value = "task-3507733" [ 674.801025] env[69475]: _type = "Task" [ 674.801025] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.811298] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507733, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.171389] env[69475]: DEBUG nova.objects.instance [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lazy-loading 'flavor' on Instance uuid 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 675.172766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7870e4d-6d62-4a1b-b59b-d4b69ac69b6a tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.620s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.176899] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Successfully created port: 18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 675.227628] env[69475]: INFO nova.compute.manager [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Took 33.73 seconds to build instance. [ 675.231123] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 675.318116] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507733, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.626351] env[69475]: INFO nova.compute.manager [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Rebuilding instance [ 675.678544] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 675.681313] env[69475]: DEBUG nova.compute.manager [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.682497] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6233b19-6dc5-4269-845c-e9ce578774f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.701619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b8c74128-be11-4766-8cc4-d8a262a357fc tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.067s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.739663] env[69475]: DEBUG oslo_concurrency.lockutils [None req-69b07195-dc0f-413a-9ed6-a51a35b9a595 tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.096s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.814974] env[69475]: DEBUG oslo_vmware.api [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507733, 'name': PowerOnVM_Task, 'duration_secs': 0.945381} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.814974] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 675.815164] env[69475]: INFO nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Took 5.93 seconds to spawn the instance on the hypervisor. 
[ 675.815320] env[69475]: DEBUG nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 675.816170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b1b062-55b9-4563-8431-44172ff3457a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.847441] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c523f4c-12cb-44ea-a11c-9ead4627a910 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.854490] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f3cedda-300e-40ee-96a9-07cc90c3b3ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.890600] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d69cb377-f676-4ee8-8fe6-511e04833aac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.900646] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9d43ce2-a1bf-4991-92cd-eba7061c697b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.919020] env[69475]: DEBUG nova.compute.provider_tree [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.215416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.244897] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 676.250088] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 676.289738] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 676.290064] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 676.290273] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 676.290496] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 676.290661] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 676.291225] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 676.291646] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 676.291877] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 676.292077] env[69475]: DEBUG nova.virt.hardware [None 
req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 676.292256] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 676.292433] env[69475]: DEBUG nova.virt.hardware [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 676.293355] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba8494c-1332-4901-a836-626889afc864 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.303928] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42713740-43ee-4b05-ae99-887e5ab28795 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.339761] env[69475]: INFO nova.compute.manager [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Took 30.50 seconds to build instance. 
[ 676.421255] env[69475]: DEBUG nova.scheduler.client.report [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.681630] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.682012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.700706] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 676.701329] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1c178dd-232c-4ca6-a62d-914262d90822 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.710964] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 676.710964] env[69475]: value = "task-3507735" [ 676.710964] env[69475]: _type = "Task" [ 676.710964] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.732234] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507735, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.780627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.839504] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fbd8cff7-5279-4324-8648-6ddc782a5099 tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.914s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.932993] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.715s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.932993] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.936020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.227s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.936363] env[69475]: DEBUG nova.objects.instance [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lazy-loading 'resources' on Instance uuid a22a4d65-56eb-4313-bd0e-81148981f5b8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 677.162390] env[69475]: DEBUG nova.compute.manager [None req-bd293cf1-661c-4d04-a3d3-1bd9cf0c1c0b tempest-ServerDiagnosticsV248Test-1962308902 tempest-ServerDiagnosticsV248Test-1962308902-project-admin] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 677.163661] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa6474b-65e3-436e-bfa6-f7d665a553c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.172257] env[69475]: INFO nova.compute.manager [None req-bd293cf1-661c-4d04-a3d3-1bd9cf0c1c0b tempest-ServerDiagnosticsV248Test-1962308902 tempest-ServerDiagnosticsV248Test-1962308902-project-admin] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Retrieving diagnostics [ 677.173176] env[69475]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ab45ef3-787f-49ee-a2ef-af90186ce5b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.203888] env[69475]: INFO nova.compute.manager [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Detaching volume abac38e1-aef6-4f0f-88d9-c61aede17432 [ 677.223661] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507735, 'name': PowerOffVM_Task, 'duration_secs': 0.140835} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.224019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 677.224307] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 677.225117] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bf3e97-122d-49ab-a92e-ce8515e5aa28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.233201] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 677.233619] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1db28435-6889-4a15-ba40-09feccbf85d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.244835] env[69475]: INFO nova.virt.block_device [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Attempting to driver detach volume abac38e1-aef6-4f0f-88d9-c61aede17432 from mountpoint /dev/sdb [ 677.245104] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 677.245322] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700896', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'name': 'volume-abac38e1-aef6-4f0f-88d9-c61aede17432', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9cfd8425-c1aa-4dbc-afa4-3a5aa10428de', 'attached_at': '', 'detached_at': '', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'serial': 'abac38e1-aef6-4f0f-88d9-c61aede17432'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 677.246238] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc552747-859f-466c-a72d-da0b6b315b96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.273577] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Successfully updated port: 18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 677.276871] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69533bd3-c69b-4c2d-8355-ffba47ca6d14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.279788] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 677.280021] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 677.280238] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Deleting the datastore file [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 677.281094] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2e935f1-9c98-4835-85d1-ad57206aa444 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.293060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f99c7ca-27a4-4393-80e2-501336443344 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 677.295855] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 677.295855] env[69475]: value = "task-3507737" [ 677.295855] env[69475]: _type = "Task" [ 677.295855] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.317216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4232c5-0f5c-42c1-a1f8-469149d719be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.323771] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.339188] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] The volume has not been displaced from its original location: [datastore2] volume-abac38e1-aef6-4f0f-88d9-c61aede17432/volume-abac38e1-aef6-4f0f-88d9-c61aede17432.vmdk. No consolidation needed. {{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 677.343065] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfiguring VM instance instance-00000008 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 677.343490] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 677.345952] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dfe7996f-aa4f-48a2-bf19-5ad8cf97543e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.367406] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Waiting for the task: (returnval){ [ 677.367406] env[69475]: value = "task-3507738" [ 677.367406] env[69475]: _type = "Task" [ 677.367406] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.376900] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507738, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.440904] env[69475]: DEBUG nova.compute.utils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 677.442658] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.442826] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.520449] env[69475]: DEBUG nova.policy [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d4323c195b24245a75109e165f900f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6dd9c026624896ae4de7fab35720d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 677.785020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 677.785020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 677.785020] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 677.808646] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138211} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.808919] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 677.809108] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 677.809279] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 677.881903] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507738, 'name': ReconfigVM_Task, 'duration_secs': 0.355666} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.882831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.885385] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Reconfigured VM instance instance-00000008 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 677.891285] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e020d366-deb7-40e2-b23e-d3ae40901b3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.912024] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Waiting for the task: (returnval){ [ 677.912024] env[69475]: value = "task-3507739" [ 677.912024] env[69475]: _type = "Task" [ 677.912024] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.918934] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507739, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.948287] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 678.021528] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d07dc7-b243-4e37-8e7a-88d882071826 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.030555] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b15487-9144-473a-9354-0e30f0158711 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.035868] env[69475]: DEBUG nova.compute.manager [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Received event network-vif-plugged-18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 678.036135] env[69475]: DEBUG oslo_concurrency.lockutils [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] Acquiring lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.036376] env[69475]: DEBUG oslo_concurrency.lockutils [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 678.036580] env[69475]: DEBUG oslo_concurrency.lockutils [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 678.036781] env[69475]: DEBUG nova.compute.manager [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] No waiting events found dispatching network-vif-plugged-18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 678.036980] env[69475]: WARNING nova.compute.manager [req-b2e5272b-104b-42d8-a550-3b8d689d26a4 req-4c2a4eab-b90b-4db4-af4e-8504a517d34d service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Received unexpected event network-vif-plugged-18760a36-7bdf-4698-95f1-514490cd8c2b for instance with vm_state building and task_state spawning. 
[ 678.069954] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40c251b6-d9d2-4dc5-8111-30a56fef0f91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.077252] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108f7060-6423-4f9a-a1a8-0de51be79850 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.091067] env[69475]: DEBUG nova.compute.provider_tree [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.110147] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Successfully created port: 241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 678.350467] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 678.420718] env[69475]: DEBUG oslo_vmware.api [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Task: {'id': task-3507739, 'name': ReconfigVM_Task, 'duration_secs': 0.1759} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.421027] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700896', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'name': 'volume-abac38e1-aef6-4f0f-88d9-c61aede17432', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '9cfd8425-c1aa-4dbc-afa4-3a5aa10428de', 'attached_at': '', 'detached_at': '', 'volume_id': 'abac38e1-aef6-4f0f-88d9-c61aede17432', 'serial': 'abac38e1-aef6-4f0f-88d9-c61aede17432'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 678.594854] env[69475]: DEBUG nova.scheduler.client.report [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 678.630890] env[69475]: DEBUG nova.network.neutron [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Updating instance_info_cache with network_info: [{"id": "18760a36-7bdf-4698-95f1-514490cd8c2b", "address": "fa:16:3e:98:53:7f", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18760a36-7b", "ovs_interfaceid": "18760a36-7bdf-4698-95f1-514490cd8c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.850390] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 678.850740] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 678.850857] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 678.851048] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 678.851195] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 678.851337] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 678.851844] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 678.852079] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 678.852264] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 678.852429] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 
tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 678.852602] env[69475]: DEBUG nova.virt.hardware [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 678.853484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1de606-ecf1-4e34-9333-6226b7b81ea1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.866021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b576be78-5663-43cc-a542-3908b9c9099a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.884020] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 678.890721] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 678.891130] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 678.891422] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-920dbc7c-e51d-49df-8ed6-120d3dbb5f2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.911743] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 678.911743] env[69475]: value = "task-3507740" [ 678.911743] env[69475]: _type = "Task" [ 678.911743] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.920299] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507740, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.972253] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 678.987466] env[69475]: DEBUG nova.objects.instance [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lazy-loading 'flavor' on Instance uuid 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.002041] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 679.002272] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 679.002421] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 679.002592] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 679.002727] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 679.002901] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 679.003145] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 679.003298] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 
tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 679.003456] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 679.003609] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 679.003771] env[69475]: DEBUG nova.virt.hardware [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 679.004707] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b1929ab-24d2-4d89-a7fc-ae67553969f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.015239] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad981418-efed-47fa-b5b3-b75c26b4ddf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.100202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.102617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.233s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.102859] env[69475]: DEBUG nova.objects.instance [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'resources' on Instance uuid 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 679.132774] env[69475]: INFO nova.scheduler.client.report [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Deleted allocations for instance a22a4d65-56eb-4313-bd0e-81148981f5b8 [ 679.134273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.134562] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Instance network_info: |[{"id": "18760a36-7bdf-4698-95f1-514490cd8c2b", "address": "fa:16:3e:98:53:7f", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18760a36-7b", "ovs_interfaceid": "18760a36-7bdf-4698-95f1-514490cd8c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 679.139961] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:53:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '18760a36-7bdf-4698-95f1-514490cd8c2b', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 679.147925] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating folder: Project (f8ffeef220f04d9eb22ef69b68e9c34a). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.148665] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2549527-cc01-4df5-9727-e81ff6bfd837 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.162402] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created folder: Project (f8ffeef220f04d9eb22ef69b68e9c34a) in parent group-v700823. [ 679.162689] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating folder: Instances. Parent ref: group-v700905. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 679.162848] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3392a844-0458-4c15-8ae7-943fd4b94242 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.175268] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created folder: Instances in parent group-v700905. [ 679.175508] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.175719] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 679.175987] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e581664-28b4-4ade-bfaf-e9848930043e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.198889] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 679.198889] env[69475]: value = "task-3507743" [ 679.198889] env[69475]: _type = "Task" [ 679.198889] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.212467] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507743, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.429026] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507740, 'name': CreateVM_Task, 'duration_secs': 0.371558} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.429026] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.429026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.429026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.429026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.429026] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3897dcf6-7573-4a90-9bf1-45034dd375f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.435546] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 679.435546] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cf4038-bbcc-8b91-200a-2b4317a8c980" [ 679.435546] env[69475]: _type = "Task" [ 679.435546] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.447401] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cf4038-bbcc-8b91-200a-2b4317a8c980, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.654124] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edddaa1b-bfff-4bf6-ae7b-26e2f299ee6c tempest-ServersNegativeTestMultiTenantJSON-99401137 tempest-ServersNegativeTestMultiTenantJSON-99401137-project-member] Lock "a22a4d65-56eb-4313-bd0e-81148981f5b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.140s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.712697] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507743, 'name': CreateVM_Task, 'duration_secs': 0.35835} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.712852] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 679.713542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.946708] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cf4038-bbcc-8b91-200a-2b4317a8c980, 'name': SearchDatastore_Task, 'duration_secs': 0.018035} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.946866] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.947112] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.947351] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.947499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.947711] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 679.948145] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.949256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 679.949509] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6d7e8296-0f7c-4599-8fcc-f2a811d99ea4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.952355] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ebedcbff-d1e8-4bcc-82dd-b871bc00a2cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.962679] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 679.962679] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a3a474-0d7e-0b00-23fa-e2686d317451" [ 679.962679] env[69475]: _type = "Task" [ 679.962679] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.969738] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 679.969738] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 679.981147] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4755e6d3-e0f7-4d03-bca3-d5827ba4555b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.987475] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a3a474-0d7e-0b00-23fa-e2686d317451, 'name': SearchDatastore_Task, 'duration_secs': 0.010893} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.987475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 679.987475] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 679.987475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.989288] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 679.989288] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df1220-b9d4-494a-0727-ad5602099cd4" [ 679.989288] env[69475]: _type = "Task" [ 679.989288] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 679.998803] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac2f1ea-ade5-489c-9f89-97ab89d9643e tempest-VolumesAssistedSnapshotsTest-1816905566 tempest-VolumesAssistedSnapshotsTest-1816905566-project-admin] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.317s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.002453] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df1220-b9d4-494a-0727-ad5602099cd4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.173155] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Successfully updated port: 241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 680.205057] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.205057] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.205057] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ceba7a-1698-42ee-b08b-fac7bebdcd78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.214443] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e55af17-b15f-4425-9357-f7c4c695b55c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.252710] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c41a10-ad00-4bf8-b6a4-460cd14fbc38 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.262992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c70040-b739-4d03-9719-b875e7b2ef85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.279893] env[69475]: DEBUG nova.compute.provider_tree [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.501315] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df1220-b9d4-494a-0727-ad5602099cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.011446} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 680.502192] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-175dd3d9-eea6-42c3-9104-2cb319d2d97a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.509217] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 680.509217] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288ca3e-1fdf-9efd-2eb0-776692db76f0" [ 680.509217] env[69475]: _type = "Task" [ 680.509217] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.518726] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288ca3e-1fdf-9efd-2eb0-776692db76f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.546149] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Received event network-changed-18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 680.546149] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Refreshing instance network info cache due to event network-changed-18760a36-7bdf-4698-95f1-514490cd8c2b. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 680.546149] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Acquiring lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.547242] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Acquired lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.547884] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Refreshing network info cache for port 18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 680.681110] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.681110] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 680.681110] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 680.783064] env[69475]: DEBUG nova.scheduler.client.report [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 681.026685] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288ca3e-1fdf-9efd-2eb0-776692db76f0, 'name': SearchDatastore_Task, 'duration_secs': 0.011927} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.026939] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.027168] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.027552] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.027643] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 681.027933] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34fb2f77-9c36-4cd9-90aa-c604bab77f9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.030148] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f26a74ad-78b5-40c3-b32a-47c80d7ce530 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.038341] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 681.038341] env[69475]: value = "task-3507745" [ 681.038341] env[69475]: _type = "Task" [ 681.038341] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.042960] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 681.043176] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 681.044261] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cd708d2-08b5-49f3-aab8-f6417b8e2f5b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.052404] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.058020] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 681.058020] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd3692-e809-f427-03d6-b1e0e8d7b816" [ 681.058020] env[69475]: _type = "Task" [ 681.058020] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.069075] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd3692-e809-f427-03d6-b1e0e8d7b816, 'name': SearchDatastore_Task} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.069914] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2913fc8-0a8d-42d2-808d-3389ac5f0b39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.076411] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 681.076411] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525d4d4f-58bb-f774-c96a-74ec3d427602" [ 681.076411] env[69475]: _type = "Task" [ 681.076411] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.087077] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525d4d4f-58bb-f774-c96a-74ec3d427602, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.290668] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.298029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.355s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.298029] env[69475]: INFO nova.compute.claims [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 681.327090] env[69475]: INFO nova.scheduler.client.report [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocations for instance 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3 [ 681.533091] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 681.549272] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509591} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.549694] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 681.549989] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 681.550326] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6fb10a2d-f0bb-4466-8fbe-d8c9a7ec9623 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.559209] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 681.559209] env[69475]: value = "task-3507746" [ 681.559209] env[69475]: _type = "Task" [ 681.559209] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.569837] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507746, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.587448] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525d4d4f-58bb-f774-c96a-74ec3d427602, 'name': SearchDatastore_Task, 'duration_secs': 0.010097} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.590312] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.590783] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] fa2ca135-3cd2-411e-b1fc-35b93a97e75d/fa2ca135-3cd2-411e-b1fc-35b93a97e75d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 681.591124] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09c34b42-5e1c-4001-ba46-dd19b6383d52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.600923] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 681.600923] env[69475]: value = "task-3507747" [ 681.600923] env[69475]: _type = "Task" [ 681.600923] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.613642] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.840329] env[69475]: DEBUG oslo_concurrency.lockutils [None req-468bf978-c18b-49b3-ada8-4932ccd8c7b2 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.818s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.947038] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Updated VIF entry in instance network info cache for port 18760a36-7bdf-4698-95f1-514490cd8c2b. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 681.947473] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Updating instance_info_cache with network_info: [{"id": "18760a36-7bdf-4698-95f1-514490cd8c2b", "address": "fa:16:3e:98:53:7f", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap18760a36-7b", "ovs_interfaceid": "18760a36-7bdf-4698-95f1-514490cd8c2b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.015595] env[69475]: DEBUG nova.network.neutron [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Updating instance_info_cache with network_info: [{"id": "241f2dac-56bc-473f-8ee9-6df190c99664", "address": "fa:16:3e:b2:98:ee", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241f2dac-56", "ovs_interfaceid": "241f2dac-56bc-473f-8ee9-6df190c99664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.072817] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507746, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.097815} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.073493] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 682.077020] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1692bb-6bb4-4f10-b50c-32731ffb954a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.110978] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 682.111478] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6873ffab-2201-48ce-bacb-d375bf3f9a7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.142028] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507747, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.142254] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 682.142254] env[69475]: value = "task-3507748" [ 682.142254] env[69475]: _type = "Task" [ 682.142254] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.154924] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507748, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.451562] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Releasing lock "refresh_cache-fa2ca135-3cd2-411e-b1fc-35b93a97e75d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.451562] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.451776] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing instance network info cache due to event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 682.451825] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Acquiring lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.454859] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Acquired lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.454859] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 682.525824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.525824] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Instance network_info: |[{"id": "241f2dac-56bc-473f-8ee9-6df190c99664", "address": "fa:16:3e:b2:98:ee", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", 
"port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241f2dac-56", "ovs_interfaceid": "241f2dac-56bc-473f-8ee9-6df190c99664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 682.526087] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:98:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '241f2dac-56bc-473f-8ee9-6df190c99664', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 682.544282] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating folder: Project (4e6dd9c026624896ae4de7fab35720d8). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.553017] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3ea77f05-107d-4dc4-b089-41d04fd6c74a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.570984] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created folder: Project (4e6dd9c026624896ae4de7fab35720d8) in parent group-v700823. [ 682.571330] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating folder: Instances. Parent ref: group-v700908. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 682.575687] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5fdfb12-c001-410e-a907-bd34ded70509 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.592504] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created folder: Instances in parent group-v700908. [ 682.592504] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 682.593150] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 682.593150] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-99b16c75-562b-4746-9313-cbdd1029bcd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.625528] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507747, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.627909] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 682.627909] env[69475]: value = "task-3507751" [ 682.627909] env[69475]: _type = "Task" [ 682.627909] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.639828] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507751, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.656935] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507748, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.020484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bfc85d-a3d6-4ddb-967e-2f878823120a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.033407] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-350cc724-53e3-4b78-820f-0713c5671701 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.070575] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9433ddb0-b22c-40d2-ac72-c996befa0c3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.082478] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993831d0-f294-4dba-a4b1-d4270433f561 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.105197] env[69475]: DEBUG nova.compute.provider_tree [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 683.126628] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507747, 
'name': CopyVirtualDisk_Task, 'duration_secs': 1.0677} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.126944] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] fa2ca135-3cd2-411e-b1fc-35b93a97e75d/fa2ca135-3cd2-411e-b1fc-35b93a97e75d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 683.127242] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 683.127567] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99310edd-646b-458b-85fd-34ca0e46106c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.141454] env[69475]: DEBUG nova.compute.manager [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Received event network-changed-241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.141645] env[69475]: DEBUG nova.compute.manager [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Refreshing instance network info cache due to event network-changed-241f2dac-56bc-473f-8ee9-6df190c99664. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 683.141851] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Acquiring lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.142371] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Acquired lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.142371] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Refreshing network info cache for port 241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.154444] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 683.154444] env[69475]: value = "task-3507752" [ 683.154444] env[69475]: _type = "Task" [ 683.154444] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.155101] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507751, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.162482] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507748, 'name': ReconfigVM_Task, 'duration_secs': 1.012534} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.163167] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfigured VM instance instance-00000019 to attach disk [datastore1] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 683.163836] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7328d09c-3a60-42a7-a726-e16ae7442482 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.168731] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507752, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.174894] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 683.174894] env[69475]: value = "task-3507753" [ 683.174894] env[69475]: _type = "Task" [ 683.174894] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.186351] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507753, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.594474] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updated VIF entry in instance network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 683.595028] env[69475]: DEBUG nova.network.neutron [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [{"id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "address": "fa:16:3e:c3:b0:ae", "network": {"id": "de4f7b85-fb16-4097-91e9-9f3cf05371be", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-742523535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c112f0c0629d4bf9a01e59342b38da87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32a4878e-f7", "ovs_interfaceid": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.605110] env[69475]: DEBUG nova.scheduler.client.report [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 683.638763] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507751, 'name': CreateVM_Task, 'duration_secs': 0.788298} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.639591] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 683.640318] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.640474] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.640829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 683.641362] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4057e357-5fd2-47c1-b5b5-832d9a499c8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.647752] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 683.647752] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52db3f2c-6d4a-a92e-d671-b186351f6fa5" [ 683.647752] env[69475]: _type = "Task" [ 683.647752] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.663977] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52db3f2c-6d4a-a92e-d671-b186351f6fa5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.669873] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507752, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085614} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.670171] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 683.671042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1229143-8260-4a98-8919-b2f48c2aeceb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.697937] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] fa2ca135-3cd2-411e-b1fc-35b93a97e75d/fa2ca135-3cd2-411e-b1fc-35b93a97e75d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 683.698732] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24bcac2c-52bd-4f50-b538-fd0787196011 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.718641] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507753, 'name': Rename_Task, 'duration_secs': 0.161148} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.719493] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 683.719746] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-770ca747-9d7f-4944-aebe-b13447e43194 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.726444] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 683.726444] env[69475]: value = "task-3507754" [ 683.726444] env[69475]: _type = "Task" [ 683.726444] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.727927] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 683.727927] env[69475]: value = "task-3507755" [ 683.727927] env[69475]: _type = "Task" [ 683.727927] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.745225] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.745225] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507754, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.031477] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Updated VIF entry in instance network info cache for port 241f2dac-56bc-473f-8ee9-6df190c99664. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.032035] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Updating instance_info_cache with network_info: [{"id": "241f2dac-56bc-473f-8ee9-6df190c99664", "address": "fa:16:3e:b2:98:ee", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241f2dac-56", "ovs_interfaceid": "241f2dac-56bc-473f-8ee9-6df190c99664", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.098048] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Releasing lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.098364] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Received event network-vif-plugged-241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.098555] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service 
nova] Acquiring lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.098801] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.098981] env[69475]: DEBUG oslo_concurrency.lockutils [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.099165] env[69475]: DEBUG nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] No waiting events found dispatching network-vif-plugged-241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.099328] env[69475]: WARNING nova.compute.manager [req-3da7957d-3d95-4289-88a8-f15a60b5d85a req-d19f0bdc-8509-49de-bc45-ee9a1865e783 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Received unexpected event network-vif-plugged-241f2dac-56bc-473f-8ee9-6df190c99664 for instance with vm_state building and task_state spawning. [ 684.113416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.816s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.113416] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 684.114664] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.636s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.116106] env[69475]: INFO nova.compute.claims [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 684.162323] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52db3f2c-6d4a-a92e-d671-b186351f6fa5, 'name': SearchDatastore_Task, 'duration_secs': 0.014314} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.162846] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.163203] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 684.163472] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.163654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.163802] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 684.164077] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1328e23a-07e8-4231-93f8-4981adcd26ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.174178] env[69475]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 684.174937] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 684.177442] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88fe36f5-ffff-450c-8b23-4a3a19ce0641 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.185614] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 684.185614] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d43ada-2254-0dfe-0f7e-41065bfdc5e9" [ 684.185614] env[69475]: _type = "Task" [ 684.185614] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.195835] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d43ada-2254-0dfe-0f7e-41065bfdc5e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.246107] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507754, 'name': ReconfigVM_Task, 'duration_secs': 0.374465} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.246362] env[69475]: DEBUG oslo_vmware.api [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507755, 'name': PowerOnVM_Task, 'duration_secs': 0.488962} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.247767] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Reconfigured VM instance instance-0000001b to attach disk [datastore1] fa2ca135-3cd2-411e-b1fc-35b93a97e75d/fa2ca135-3cd2-411e-b1fc-35b93a97e75d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 684.248548] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 684.249154] env[69475]: DEBUG nova.compute.manager [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 684.250102] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da4ac44e-7a32-4a02-9498-067ca713df07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.253340] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ad28f2-9118-46e2-bec8-023f0153bdb2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.265197] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 684.265197] env[69475]: value = "task-3507756" [ 684.265197] env[69475]: _type = "Task" [ 684.265197] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.275366] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507756, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.289314] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.290446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.291837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.291837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.291837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.294999] env[69475]: INFO nova.compute.manager [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Terminating instance [ 684.535540] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Releasing lock "refresh_cache-9e2d4d61-71ed-447a-b28e-c29c5bd8d763" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 684.535813] env[69475]: DEBUG nova.compute.manager [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.535982] env[69475]: DEBUG nova.compute.manager [req-21f6d638-707a-4538-ae96-4120f713c738 
req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing instance network info cache due to event network-changed-32a4878e-f7f5-490d-a877-d01cb7eaa6dc. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 684.536221] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Acquiring lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.536366] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Acquired lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.536526] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Refreshing network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 684.621687] env[69475]: DEBUG nova.compute.utils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 684.625331] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 684.625542] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 684.685330] env[69475]: DEBUG nova.policy [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed78cec9326c42b6b9bd50e64abba5b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b0f344232f8e431e89a27e224dc13412', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 684.699865] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d43ada-2254-0dfe-0f7e-41065bfdc5e9, 'name': SearchDatastore_Task, 'duration_secs': 0.027823} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.700754] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e938da6c-b85d-44f5-91b3-7d569784515a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.708729] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 684.708729] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525c24bf-2db7-d06a-9411-2369dfda6e47" [ 684.708729] env[69475]: _type = "Task" [ 684.708729] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.720641] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525c24bf-2db7-d06a-9411-2369dfda6e47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.775791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.782667] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507756, 'name': Rename_Task, 'duration_secs': 0.333344} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.782941] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 684.783296] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8ff5b843-aed0-451b-83f4-1e8a9789abc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.791350] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 684.791350] env[69475]: value = "task-3507757" [ 684.791350] env[69475]: _type = "Task" [ 684.791350] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.803916] env[69475]: DEBUG nova.compute.manager [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 684.804202] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 684.804532] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507757, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.805313] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2c4632-4108-4803-82ca-4a55aed0a760 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.812633] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 684.812900] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d5e3746-4d03-407b-bc2a-ea3d3177a8cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.821622] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 684.821622] env[69475]: value = "task-3507758" [ 684.821622] env[69475]: _type = "Task" [ 684.821622] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.837838] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507758, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.130232] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 685.165270] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Successfully created port: 01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 685.224307] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525c24bf-2db7-d06a-9411-2369dfda6e47, 'name': SearchDatastore_Task, 'duration_secs': 0.028582} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.226928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.227212] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 9e2d4d61-71ed-447a-b28e-c29c5bd8d763/9e2d4d61-71ed-447a-b28e-c29c5bd8d763.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 685.227674] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-64892f6f-df19-4672-8f8e-718932f8a23f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.236266] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 685.236266] env[69475]: value = "task-3507759" [ 685.236266] env[69475]: _type = "Task" [ 685.236266] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.247890] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507759, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.302032] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507757, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.337321] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507758, 'name': PowerOffVM_Task, 'duration_secs': 0.244992} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.337615] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 685.337852] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 685.338082] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e8c7b22-4be8-4547-8b78-d36cfcb3947f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.413909] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 685.414183] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 685.414371] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleting the datastore file [datastore2] 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 685.414634] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d05cdbdc-e6b1-4a17-8435-76b593f68aee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.426189] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for the task: (returnval){ [ 685.426189] env[69475]: value = "task-3507761" [ 685.426189] env[69475]: _type = "Task" [ 685.426189] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.435782] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507761, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.446994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "8f65d893-d2e2-452f-8870-f72ec036f16a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.447274] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.447580] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.447720] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.447890] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.452806] env[69475]: INFO nova.compute.manager [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Terminating instance [ 685.495047] env[69475]: INFO nova.compute.manager [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Rebuilding instance [ 685.552702] env[69475]: DEBUG 
nova.compute.manager [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.553636] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e96b4fe-a5e4-445b-9978-d456ea01c9b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.751133] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507759, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.788814] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25bfd9c-cd28-4936-8fbf-3ec8312d296e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.802308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d85d3c7-682d-4027-8a51-0910b94a86a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.807211] env[69475]: DEBUG oslo_vmware.api [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507757, 'name': PowerOnVM_Task, 'duration_secs': 0.855459} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.807888] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 685.808117] env[69475]: INFO nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Took 9.56 seconds to spawn the instance on the hypervisor. 
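Annotation: the CopyVirtualDisk_Task, Rename_Task and PowerOnVM_Task traffic above all follows the same oslo.vmware calling pattern: invoke_api() issues the SOAP request and returns a Task managed-object reference immediately, and wait_for_task() then polls it, which is what produces the "Waiting for the task ... to complete" and "progress is N%" entries. The snippet below is a minimal sketch of that pattern, not Nova's own code; the host, credentials and datastore paths are placeholders, and the constructor arguments are the standard VMwareAPISession parameters as documented, shown for illustration only.

    # Sketch of the oslo.vmware task-polling pattern seen throughout this log.
    from oslo_vmware import api, vim_util

    session = api.VMwareAPISession(
        'vcenter.example.test',      # vCenter host (placeholder)
        'user', 'secret',            # credentials (placeholders)
        api_retry_count=10,
        task_poll_interval=0.5)

    # Look up a Datacenter managed-object reference via the property collector,
    # the same RetrievePropertiesEx traffic that appears throughout this log.
    dc_ref = session.invoke_api(vim_util, 'get_objects',
                                session.vim, 'Datacenter', 100).objects[0].obj

    # invoke_api() sends the SOAP call and returns the Task reference at once;
    # wait_for_task() then polls it, logging progress, until success or failure.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
        sourceDatacenter=dc_ref,
        destName='[datastore1] my-instance/my-instance.vmdk')
    task_info = session.wait_for_task(task)   # raises if the task errors out
    print(task_info.state)                    # 'success'
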
[ 685.808296] env[69475]: DEBUG nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 685.809134] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8fdbf1c-9323-41e5-917f-f57c0cf48e6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.842313] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42c81e0-f7f4-4010-82c6-ab6d60a8b2af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.856785] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd854039-3608-4742-906b-d09ed101bebd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.872437] env[69475]: DEBUG nova.compute.provider_tree [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 685.886120] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updated VIF entry in instance network info cache for port 32a4878e-f7f5-490d-a877-d01cb7eaa6dc. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 685.886120] env[69475]: DEBUG nova.network.neutron [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [{"id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "address": "fa:16:3e:c3:b0:ae", "network": {"id": "de4f7b85-fb16-4097-91e9-9f3cf05371be", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-742523535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c112f0c0629d4bf9a01e59342b38da87", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "23f4655e-3495-421d-be4e-f6002a85a47a", "external-id": "nsx-vlan-transportzone-520", "segmentation_id": 520, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32a4878e-f7", "ovs_interfaceid": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 685.936109] env[69475]: DEBUG oslo_vmware.api [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Task: {'id': task-3507761, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28272} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.936366] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 685.936545] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 685.936729] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 685.936940] env[69475]: INFO nova.compute.manager [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Took 1.13 seconds to destroy the instance on the hypervisor. 
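Annotation: the two "Updating instance_info_cache with network_info" entries above carry the full VIF model as JSON, one element per port, with the fixed IPs nested under network, subnets and ips. The sketch below is plain dict traversal over data in exactly that shape (the sample values are copied from the 8f65d893 entry above, trimmed to the fields used); it is not a Nova API, just a way to read these records.

    # Sketch: pulling port ID, MAC, devname and fixed IPs out of a
    # network_info entry shaped like the cache updates logged above.
    network_info = [{
        "id": "32a4878e-f7f5-490d-a877-d01cb7eaa6dc",
        "address": "fa:16:3e:c3:b0:ae",
        "devname": "tap32a4878e-f7",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.8", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        fixed_ips = [
            ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"
        ]
        print(vif["id"], vif["address"], vif["devname"], fixed_ips)
    # -> 32a4878e-... fa:16:3e:c3:b0:ae tap32a4878e-f7 ['192.168.128.8']
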
[ 685.937194] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 685.937381] env[69475]: DEBUG nova.compute.manager [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 685.937476] env[69475]: DEBUG nova.network.neutron [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 685.958017] env[69475]: DEBUG nova.compute.manager [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 685.958017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 685.959068] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36487654-37a4-4702-b778-97c80809402d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.967732] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 685.968055] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-565e5a93-7074-4cdb-a32c-3b8d65d513ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.975967] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 685.975967] env[69475]: value = "task-3507762" [ 685.975967] env[69475]: _type = "Task" [ 685.975967] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.985717] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507762, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.148333] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 686.175672] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 686.175969] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 686.176180] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 686.176372] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 686.176518] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 686.176735] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 686.176869] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 686.177038] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 686.177246] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 686.177412] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 686.177585] env[69475]: DEBUG nova.virt.hardware [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 686.178479] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ecb2086-5900-40f5-84d1-afaf1714c037 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.188830] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c48509-b729-4d82-ba66-693c4c4726a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.249609] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507759, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.648153} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.250178] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 9e2d4d61-71ed-447a-b28e-c29c5bd8d763/9e2d4d61-71ed-447a-b28e-c29c5bd8d763.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 686.251672] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 686.251672] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6377b1a-f956-47be-b475-775c0be5c472 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.260701] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 686.260701] env[69475]: value = "task-3507763" [ 686.260701] env[69475]: _type = "Task" [ 686.260701] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.272642] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507763, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.361045] env[69475]: INFO nova.compute.manager [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Took 34.17 seconds to build instance. 
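Annotation: the "Extending root virtual disk to 1048576" entry above is the usual flavor-to-vSphere unit conversion. The m1.nano flavor used in this run has root_gb=1, and 1048576 is consistent with that value expressed in KiB, which is the unit ExtendVirtualDisk_Task works in. A one-line check under that assumption:

    # Sketch: root-disk size passed to ExtendVirtualDisk_Task, assuming the
    # logged value is in KiB (consistent with root_gb=1 for m1.nano).
    root_gb = 1
    size_in_kib = root_gb * 1024 * 1024
    assert size_in_kib == 1048576  # matches "Extending root virtual disk to 1048576"
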
[ 686.375980] env[69475]: DEBUG nova.scheduler.client.report [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 686.387994] env[69475]: DEBUG oslo_concurrency.lockutils [req-21f6d638-707a-4538-ae96-4120f713c738 req-43008a41-085c-4e47-a5a7-9953811a0db1 service nova] Releasing lock "refresh_cache-8f65d893-d2e2-452f-8870-f72ec036f16a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.487029] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507762, 'name': PowerOffVM_Task, 'duration_secs': 0.3839} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.487338] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 686.487511] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 686.487794] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-51de26f3-d7de-44be-a4df-775871dc4fb7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.572121] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 686.572451] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5eb90aef-c236-469c-8cfc-11f8354dfd6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.583210] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Unregistered the VM {{(pid=69475) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 686.583210] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 686.583210] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Deleting the datastore file [datastore1] 8f65d893-d2e2-452f-8870-f72ec036f16a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 686.583458] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa7e1b51-cd54-4d1a-b13e-d2b76cdd2f25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.585356] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 686.585356] env[69475]: value = "task-3507765" [ 686.585356] env[69475]: _type = "Task" [ 686.585356] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.593477] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for the task: (returnval){ [ 686.593477] env[69475]: value = "task-3507766" [ 686.593477] env[69475]: _type = "Task" [ 686.593477] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.596618] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507765, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.606731] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.770992] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507763, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092128} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.771661] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 686.772115] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f1bf2e-7dcc-4300-b4b3-e4daf3bc7c62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.795139] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 9e2d4d61-71ed-447a-b28e-c29c5bd8d763/9e2d4d61-71ed-447a-b28e-c29c5bd8d763.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 686.796590] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-980afea9-2f60-4991-a9d8-287d018179b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.824200] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 686.824200] env[69475]: value = "task-3507767" [ 686.824200] env[69475]: _type = "Task" [ 686.824200] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.831788] env[69475]: DEBUG nova.compute.manager [req-e6c3744e-c42f-478b-89d5-718294ec6395 req-f92ddaf3-259c-49b4-a854-54d4f63fcb75 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Received event network-vif-deleted-806e8096-632b-4993-a27c-3eb4767e9d00 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 686.831904] env[69475]: INFO nova.compute.manager [req-e6c3744e-c42f-478b-89d5-718294ec6395 req-f92ddaf3-259c-49b4-a854-54d4f63fcb75 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Neutron deleted interface 806e8096-632b-4993-a27c-3eb4767e9d00; detaching it from the instance and deleting it from the info cache [ 686.832083] env[69475]: DEBUG nova.network.neutron [req-e6c3744e-c42f-478b-89d5-718294ec6395 req-f92ddaf3-259c-49b4-a854-54d4f63fcb75 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.837299] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507767, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.862994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2b14b873-8c44-4094-b74a-80bf3c3e2eba tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.128s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.882525] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.767s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 686.882525] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 686.885595] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.972s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 686.888317] env[69475]: INFO nova.compute.claims [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 687.096848] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507765, 'name': PowerOffVM_Task, 'duration_secs': 0.294925} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.099901] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 687.100230] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.101255] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f0e5ee-b2c8-4941-b46d-6d2b757b04ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.110467] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507766, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.112669] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 687.112932] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e21d0f2e-21a6-4cd1-a333-ff83db05cc37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.145870] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 687.146081] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 687.146282] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Deleting the datastore file [datastore1] 3149cd80-503c-42e4-ac91-54aababe84e3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 687.146550] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00b311e4-bb8a-4f2a-b47a-0c6afa3b7787 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.154419] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 
tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 687.154419] env[69475]: value = "task-3507769" [ 687.154419] env[69475]: _type = "Task" [ 687.154419] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.163600] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507769, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.318376] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.318701] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.319029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.319168] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 687.319342] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 687.322009] env[69475]: DEBUG nova.network.neutron [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.323467] env[69475]: INFO nova.compute.manager [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Terminating instance [ 
687.336360] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507767, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.338573] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b17c4e6a-f829-490b-8a29-31f85a74296a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.348807] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf35be19-6b60-496c-9c18-be30a6967695 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.366774] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 687.385282] env[69475]: DEBUG nova.compute.manager [req-e6c3744e-c42f-478b-89d5-718294ec6395 req-f92ddaf3-259c-49b4-a854-54d4f63fcb75 service nova] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Detach interface failed, port_id=806e8096-632b-4993-a27c-3eb4767e9d00, reason: Instance 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 687.394059] env[69475]: DEBUG nova.compute.utils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 687.397498] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 687.397809] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 687.479115] env[69475]: DEBUG nova.policy [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b022fbe9c8e64867806e65730bceb429', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '25294f782189432c852adf8bd89f363a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 687.610911] env[69475]: DEBUG oslo_vmware.api [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Task: {'id': task-3507766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.539058} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.610911] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.610911] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.611409] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.611969] env[69475]: INFO nova.compute.manager [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Took 1.65 seconds to destroy the instance on the hypervisor. [ 687.612874] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 687.614019] env[69475]: DEBUG nova.compute.manager [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 687.614019] env[69475]: DEBUG nova.network.neutron [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 687.632261] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Successfully updated port: 01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 687.665871] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507769, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348463} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.666615] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 687.666878] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 687.667109] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 687.739024] env[69475]: DEBUG nova.compute.manager [None req-86b773c4-9d84-4c50-b054-0a63b2ed869b tempest-ServerDiagnosticsV248Test-1962308902 tempest-ServerDiagnosticsV248Test-1962308902-project-admin] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.740101] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd7247eb-ebfe-4bba-aff1-d6eabe9fca8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.749172] env[69475]: INFO nova.compute.manager [None req-86b773c4-9d84-4c50-b054-0a63b2ed869b tempest-ServerDiagnosticsV248Test-1962308902 tempest-ServerDiagnosticsV248Test-1962308902-project-admin] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Retrieving diagnostics [ 687.750125] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810344a6-3a70-4b7e-b831-9db9785c402f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
687.825839] env[69475]: INFO nova.compute.manager [-] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Took 1.89 seconds to deallocate network for instance. [ 687.831901] env[69475]: DEBUG nova.compute.manager [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 687.832209] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 687.836577] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6af86f8-aa05-4d47-a775-0f2d8b981143 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.843365] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507767, 'name': ReconfigVM_Task, 'duration_secs': 0.682831} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.843946] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 9e2d4d61-71ed-447a-b28e-c29c5bd8d763/9e2d4d61-71ed-447a-b28e-c29c5bd8d763.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 687.844600] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41e4b659-c295-401f-8487-9466e92c86f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.848745] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 687.849042] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99801fbd-7e32-4e25-8f24-5888d7326f45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.852202] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 687.852202] env[69475]: value = "task-3507770" [ 687.852202] env[69475]: _type = "Task" [ 687.852202] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.856510] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 687.856510] env[69475]: value = "task-3507771" [ 687.856510] env[69475]: _type = "Task" [ 687.856510] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.864556] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507770, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.875124] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507771, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.898413] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 687.899406] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 687.943362] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Successfully created port: c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 688.080801] env[69475]: DEBUG nova.compute.manager [req-cde2d83b-330f-4bba-8201-db2161852f88 req-0e3bdcb2-3ed4-4dd5-b8c2-f2b7e102174d service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Received event network-vif-deleted-32a4878e-f7f5-490d-a877-d01cb7eaa6dc {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 688.081014] env[69475]: INFO nova.compute.manager [req-cde2d83b-330f-4bba-8201-db2161852f88 req-0e3bdcb2-3ed4-4dd5-b8c2-f2b7e102174d service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Neutron deleted interface 32a4878e-f7f5-490d-a877-d01cb7eaa6dc; detaching it from the instance and deleting it from the info cache [ 688.081192] env[69475]: DEBUG nova.network.neutron [req-cde2d83b-330f-4bba-8201-db2161852f88 req-0e3bdcb2-3ed4-4dd5-b8c2-f2b7e102174d service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.137342] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 688.137422] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquired lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 688.137640] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 688.342772] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.364258] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507770, 'name': Rename_Task, 'duration_secs': 0.164064} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.364892] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 688.365165] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f765471-1124-4573-8a4c-f23baf3186c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.374809] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507771, 'name': PowerOffVM_Task, 'duration_secs': 0.195345} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.374809] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 688.374809] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 688.374809] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43566c06-b985-4001-813c-795cb018ef83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.378194] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 688.378194] env[69475]: value = "task-3507772" [ 688.378194] env[69475]: _type = "Task" [ 688.378194] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.387648] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507772, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.418820] env[69475]: DEBUG nova.network.neutron [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.442061] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 688.442323] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 688.442460] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore1] fa2ca135-3cd2-411e-b1fc-35b93a97e75d {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 688.443014] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d0c35c9-4af0-410e-b945-9ab20bcaf02c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.454902] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 688.454902] env[69475]: value = "task-3507774" [ 688.454902] env[69475]: _type = "Task" [ 688.454902] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.467957] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507774, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.558291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3677fff6-c74a-4d3b-90fe-9051870ae00b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.566631] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac3d5f03-687b-4814-a3df-c2869bda4c90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.606968] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-da1b80d9-c3fd-4693-8bdc-f7520f1e6d9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.609580] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba94570-28ae-417c-a9d3-91b970824ad5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.619127] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43bcc010-3b3c-4511-b271-80face18c18d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.627993] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782de141-5337-4311-80bc-fb87bec2ffa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.656142] env[69475]: DEBUG nova.compute.provider_tree [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 688.673081] env[69475]: DEBUG nova.compute.manager [req-cde2d83b-330f-4bba-8201-db2161852f88 req-0e3bdcb2-3ed4-4dd5-b8c2-f2b7e102174d service nova] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Detach interface failed, port_id=32a4878e-f7f5-490d-a877-d01cb7eaa6dc, reason: Instance 8f65d893-d2e2-452f-8870-f72ec036f16a could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 688.674329] env[69475]: DEBUG nova.scheduler.client.report [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.703224] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 688.708356] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.708656] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 688.708848] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 688.709079] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 688.709236] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 688.709400] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 688.709656] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 688.709839] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 688.710035] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 
tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 688.710219] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 688.710413] env[69475]: DEBUG nova.virt.hardware [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 688.711376] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a403d072-8ddd-4143-a3b9-b428203fef67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.721262] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ea2a3d-36b9-43ea-a919-20674cc58639 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.740277] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 688.747578] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.747903] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 688.748188] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61001c33-c09b-40a0-83ea-06f6643b7257 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.772789] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 688.772789] env[69475]: value = "task-3507775" [ 688.772789] env[69475]: _type = "Task" [ 688.772789] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.781394] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507775, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.870212] env[69475]: DEBUG nova.network.neutron [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Updating instance_info_cache with network_info: [{"id": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "address": "fa:16:3e:bc:0c:d3", "network": {"id": "183d8509-9270-4917-9966-4eb41d4c85fb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-871424087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0f344232f8e431e89a27e224dc13412", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01154e4e-cf", "ovs_interfaceid": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 688.890847] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507772, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.910433] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 688.926947] env[69475]: INFO nova.compute.manager [-] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Took 1.31 seconds to deallocate network for instance. 
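The nova.virt.hardware entries above ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies", "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") record the guest CPU topology search for the 1-vCPU m1.nano flavor. The small enumeration below is an illustrative stand-in, not the real _get_possible_cpu_topologies code; it assumes only a vCPU count and per-dimension maxima, which the log shows defaulting to 65536.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) triples whose product equals the
        vCPU count, capped by the per-dimension maxima."""
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    # For the 1-vCPU flavor in the log there is exactly one candidate:
    print(list(possible_topologies(1)))   # [(1, 1, 1)]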
[ 688.939282] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 688.939282] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 688.939282] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 688.939435] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 688.939435] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 688.939435] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 688.939435] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 688.939435] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 688.939635] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 
tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 688.939635] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 688.939635] env[69475]: DEBUG nova.virt.hardware [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 688.940019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25588dff-f8ed-488b-841e-7697116e476a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.950447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0f894f-3788-4e61-ac3b-b48d8708f041 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.975512] env[69475]: DEBUG oslo_vmware.api [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3507774, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16072} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.975894] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 688.976209] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 688.976502] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 688.976798] env[69475]: INFO nova.compute.manager [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 688.977190] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 688.977473] env[69475]: DEBUG nova.compute.manager [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 688.977647] env[69475]: DEBUG nova.network.neutron [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.047492] env[69475]: DEBUG nova.compute.manager [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Received event network-vif-plugged-01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.047728] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Acquiring lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.048152] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.048432] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.048568] env[69475]: DEBUG nova.compute.manager [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] No waiting events found dispatching network-vif-plugged-01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 689.048801] env[69475]: WARNING nova.compute.manager [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Received unexpected event network-vif-plugged-01154e4e-cf6b-4d07-92e9-8c4c58376888 for instance with vm_state building and task_state spawning. 
[ 689.048801] env[69475]: DEBUG nova.compute.manager [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Received event network-changed-01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.048950] env[69475]: DEBUG nova.compute.manager [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Refreshing instance network info cache due to event network-changed-01154e4e-cf6b-4d07-92e9-8c4c58376888. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 689.049171] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Acquiring lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.150278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "b87cac84-ea70-428b-872e-4f6145e36b39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.150278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.150278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "b87cac84-ea70-428b-872e-4f6145e36b39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.150278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.150580] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.153125] env[69475]: INFO nova.compute.manager [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 
tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Terminating instance [ 689.182453] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.184694] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 689.192243] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.102s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.192243] env[69475]: DEBUG nova.objects.instance [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lazy-loading 'resources' on Instance uuid 3c253a57-1c93-4e8d-aaa1-1331c0547d85 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 689.288033] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507775, 'name': CreateVM_Task, 'duration_secs': 0.499614} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.288033] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.288033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.288033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.288033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 689.288033] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e168257-1a86-45f5-b1c6-94c3dac74ee4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.292069] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 689.292069] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5238a896-279d-635e-1b0e-7c96bea10cc7" [ 689.292069] env[69475]: _type = "Task" [ 689.292069] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.302027] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5238a896-279d-635e-1b0e-7c96bea10cc7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.376020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Releasing lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.376020] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Instance network_info: |[{"id": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "address": "fa:16:3e:bc:0c:d3", "network": {"id": "183d8509-9270-4917-9966-4eb41d4c85fb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-871424087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0f344232f8e431e89a27e224dc13412", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01154e4e-cf", "ovs_interfaceid": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 689.376456] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Acquired lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.376456] env[69475]: DEBUG nova.network.neutron [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Refreshing network info cache for port 01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 689.376456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bc:0c:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '438671d0-9468-4e44-84c1-4c0ebaa743e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '01154e4e-cf6b-4d07-92e9-8c4c58376888', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 689.385982] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 
tempest-AttachInterfacesV270Test-1312908406-project-member] Creating folder: Project (b0f344232f8e431e89a27e224dc13412). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.387306] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ae681377-266f-4660-90ea-45b75b7b2c7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.400304] env[69475]: DEBUG oslo_vmware.api [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507772, 'name': PowerOnVM_Task, 'duration_secs': 0.575335} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.400852] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 689.401220] env[69475]: INFO nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Took 10.43 seconds to spawn the instance on the hypervisor. [ 689.401536] env[69475]: DEBUG nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 689.402710] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-330bc02c-e88a-4a38-b414-8146f8bae9a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.408436] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Created folder: Project (b0f344232f8e431e89a27e224dc13412) in parent group-v700823. [ 689.408814] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Creating folder: Instances. Parent ref: group-v700912. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 689.409488] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a6ee265-f8ca-43b8-83d6-8b671fa5b296 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.425061] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Created folder: Instances in parent group-v700912. [ 689.425061] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.425061] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 689.425061] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f08b3b8-b441-4acf-906b-8661769029e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.445270] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.448209] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 689.448209] env[69475]: value = "task-3507778" [ 689.448209] env[69475]: _type = "Task" [ 689.448209] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.456897] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507778, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.659011] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "refresh_cache-b87cac84-ea70-428b-872e-4f6145e36b39" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.659290] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquired lock "refresh_cache-b87cac84-ea70-428b-872e-4f6145e36b39" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.659530] env[69475]: DEBUG nova.network.neutron [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.692732] env[69475]: DEBUG nova.compute.utils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.694141] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.694318] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.744121] env[69475]: DEBUG nova.network.neutron [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 689.750652] env[69475]: DEBUG nova.policy [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8226fd6a5e84472abca2df0b3597b85a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '694f14f9b2e64d769ca5ced4d71110c4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.753603] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Successfully updated port: c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.804585] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5238a896-279d-635e-1b0e-7c96bea10cc7, 'name': SearchDatastore_Task, 'duration_secs': 0.010394} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.804900] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.805247] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.805537] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.805744] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.805978] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 689.808783] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-459cfe39-eacb-4bd8-a9b2-0f9746fbc607 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.820305] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 689.821017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 689.821309] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66f59169-1420-484f-89c9-2c4b119e58ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.832429] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 689.832429] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528431a8-4dd5-0126-125b-568e0c3cc180" [ 689.832429] env[69475]: _type = "Task" [ 689.832429] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.846266] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528431a8-4dd5-0126-125b-568e0c3cc180, 'name': SearchDatastore_Task, 'duration_secs': 0.009682} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.847897] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8621c2e4-e26e-465b-b188-fe39c0859714 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.857493] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 689.857493] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f2fe16-88c2-38d7-5851-5cb65d40c451" [ 689.857493] env[69475]: _type = "Task" [ 689.857493] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.868419] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f2fe16-88c2-38d7-5851-5cb65d40c451, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.928569] env[69475]: INFO nova.compute.manager [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Took 34.56 seconds to build instance. [ 689.961206] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507778, 'name': CreateVM_Task, 'duration_secs': 0.388835} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.961206] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 689.962118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.962449] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.962925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 689.963562] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb8586f9-703e-46ae-8750-96f32113a3d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.971029] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 689.971029] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bb7aab-551e-42c0-020c-ccf6205f4668" [ 689.971029] env[69475]: _type = "Task" [ 689.971029] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.982829] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bb7aab-551e-42c0-020c-ccf6205f4668, 'name': SearchDatastore_Task, 'duration_secs': 0.010356} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.982829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 689.982829] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 689.982829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.058365] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Successfully created port: a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 690.188631] env[69475]: DEBUG nova.network.neutron [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.204561] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 690.250790] env[69475]: INFO nova.compute.manager [-] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Took 1.27 seconds to deallocate network for instance. 
[ 690.252934] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca97dbcb-8dc8-4799-a04c-edbc146240bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.259084] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 690.259438] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquired lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.259438] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 690.272237] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27974b0-8870-4df8-9029-b3c6c22c5cf0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.316259] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378d7fe1-96cf-4760-b198-34369a530354 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.326726] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08381d6e-9139-423e-84d8-b9b279d83738 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.344700] env[69475]: DEBUG nova.compute.provider_tree [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.348415] env[69475]: DEBUG nova.network.neutron [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.369578] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f2fe16-88c2-38d7-5851-5cb65d40c451, 'name': SearchDatastore_Task, 'duration_secs': 0.009679} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.370439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.370794] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.371171] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 690.371431] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 690.371699] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1fd3f17-0f07-47cd-99c4-ba68d637b8b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.374791] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1a8acea1-c277-4577-902f-3f27b9713dd0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.383912] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 690.383912] env[69475]: value = "task-3507779" [ 690.383912] env[69475]: _type = "Task" [ 690.383912] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.385242] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 690.385670] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 690.390965] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f813eec-7fde-4846-a348-f93da7a2f554 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.398899] env[69475]: DEBUG nova.compute.manager [req-b4a97cba-7735-48d3-99fa-7b1446e32323 req-5a0c3f32-78bb-42da-84e7-6540605ee209 service nova] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Received event network-vif-deleted-18760a36-7bdf-4698-95f1-514490cd8c2b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 690.405367] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.406954] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 690.406954] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5224d0d5-b763-80bd-637a-21a511f70066" [ 690.406954] env[69475]: _type = "Task" [ 690.406954] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.416544] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5224d0d5-b763-80bd-637a-21a511f70066, 'name': SearchDatastore_Task, 'duration_secs': 0.009831} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.417321] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-802d1e85-2ee0-4812-91ca-6cc75375faf1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.423091] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 690.423091] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e55bc-7f9d-c5d0-2c38-21f975ee4ceb" [ 690.423091] env[69475]: _type = "Task" [ 690.423091] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.432748] env[69475]: DEBUG oslo_concurrency.lockutils [None req-678d5e4a-a861-46da-b36c-3d74a2625c17 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.483s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 690.433317] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e55bc-7f9d-c5d0-2c38-21f975ee4ceb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.518164] env[69475]: DEBUG nova.network.neutron [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Updated VIF entry in instance network info cache for port 01154e4e-cf6b-4d07-92e9-8c4c58376888. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 690.518555] env[69475]: DEBUG nova.network.neutron [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Updating instance_info_cache with network_info: [{"id": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "address": "fa:16:3e:bc:0c:d3", "network": {"id": "183d8509-9270-4917-9966-4eb41d4c85fb", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-871424087-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b0f344232f8e431e89a27e224dc13412", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "438671d0-9468-4e44-84c1-4c0ebaa743e0", "external-id": "nsx-vlan-transportzone-918", "segmentation_id": 918, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap01154e4e-cf", "ovs_interfaceid": "01154e4e-cf6b-4d07-92e9-8c4c58376888", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.772926] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 690.808962] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 
4c2e12bf-3f16-47de-a604-44b62a6c7137] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.850856] env[69475]: DEBUG nova.scheduler.client.report [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 690.855501] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Releasing lock "refresh_cache-b87cac84-ea70-428b-872e-4f6145e36b39" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.855978] env[69475]: DEBUG nova.compute.manager [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 690.856222] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 690.857430] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c85ff9-0897-4221-9f57-a7ec95df38fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.869430] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 690.873863] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d33e5274-83b6-4374-95a1-a0b33b5ca3b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.883526] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 690.883526] env[69475]: value = "task-3507780" [ 690.883526] env[69475]: _type = "Task" [ 690.883526] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.898077] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507779, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.901269] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507780, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.933248] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e55bc-7f9d-c5d0-2c38-21f975ee4ceb, 'name': SearchDatastore_Task, 'duration_secs': 0.009633} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.933514] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.933785] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] e48e2cc1-7d60-457f-8f1c-649f0dda8cdb/e48e2cc1-7d60-457f-8f1c-649f0dda8cdb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 690.934048] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2f3b341-a125-4697-aadf-0ae23c28444d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.936072] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 690.947261] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 690.947261] env[69475]: value = "task-3507781" [ 690.947261] env[69475]: _type = "Task" [ 690.947261] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.958335] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507781, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.020764] env[69475]: DEBUG oslo_concurrency.lockutils [req-a616b277-bacc-41f3-9d5a-72df044c328a req-05f03338-41fe-433a-a3df-dc2568adb1e3 service nova] Releasing lock "refresh_cache-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.071642] env[69475]: DEBUG nova.network.neutron [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Updating instance_info_cache with network_info: [{"id": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "address": "fa:16:3e:db:fd:f5", "network": {"id": "94f8d4e2-45bb-44e4-a68f-42c66bc34a56", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-444583420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25294f782189432c852adf8bd89f363a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fc8b83-70", "ovs_interfaceid": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 691.188710] env[69475]: DEBUG nova.compute.manager [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Received event network-vif-plugged-c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.188909] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Acquiring lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.189149] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
691.189320] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.189564] env[69475]: DEBUG nova.compute.manager [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] No waiting events found dispatching network-vif-plugged-c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 691.189917] env[69475]: WARNING nova.compute.manager [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Received unexpected event network-vif-plugged-c1fc8b83-7009-439e-b6cd-c8b86d680b84 for instance with vm_state building and task_state spawning. [ 691.190102] env[69475]: DEBUG nova.compute.manager [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Received event network-changed-c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.190270] env[69475]: DEBUG nova.compute.manager [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Refreshing instance network info cache due to event network-changed-c1fc8b83-7009-439e-b6cd-c8b86d680b84. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.190442] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Acquiring lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.217179] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 691.219372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.219593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.219769] env[69475]: DEBUG nova.compute.manager [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 691.221092] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3e57f6-4abe-48e7-b488-1d6a3adead1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.231325] env[69475]: DEBUG nova.compute.manager [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 691.231952] env[69475]: DEBUG nova.objects.instance [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lazy-loading 'flavor' on Instance uuid 9e2d4d61-71ed-447a-b28e-c29c5bd8d763 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 691.248997] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 691.248997] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 
tempest-ServersTestJSON-176631825-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 691.249191] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 691.249327] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 691.249427] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 691.249583] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 691.249830] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 691.249973] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 691.250157] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 691.250368] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 691.250628] env[69475]: DEBUG nova.virt.hardware [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 691.251520] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d728d2-6dd1-481a-a027-c3030084dfe6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.264030] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b412c311-a5d8-4f42-a97d-8fe6abafc900 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.358095] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.167s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.361354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.468s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.362872] env[69475]: INFO nova.compute.claims [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 691.397623] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507780, 'name': PowerOffVM_Task, 'duration_secs': 0.20908} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.398750] env[69475]: INFO nova.scheduler.client.report [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Deleted allocations for instance 3c253a57-1c93-4e8d-aaa1-1331c0547d85 [ 691.400172] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 691.400340] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 691.400628] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d76c119-a6b8-4833-b7ae-17f7102df31e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.411012] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538482} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.415015] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.415015] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.419427] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67287320-cd63-4b05-a36c-d26b4248b040 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.427440] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 691.427440] env[69475]: value = "task-3507783" [ 691.427440] env[69475]: _type = "Task" [ 691.427440] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.443517] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507783, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.447423] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 691.448071] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 691.448071] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Deleting the datastore file [datastore2] b87cac84-ea70-428b-872e-4f6145e36b39 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 691.448748] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96e0d5d7-2a94-4a5e-ad00-fe97f54c320f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.464467] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478046} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.466719] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.467075] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] e48e2cc1-7d60-457f-8f1c-649f0dda8cdb/e48e2cc1-7d60-457f-8f1c-649f0dda8cdb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 691.467875] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 691.467875] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for the task: (returnval){ [ 691.467875] env[69475]: value = "task-3507784" [ 691.467875] env[69475]: _type = "Task" [ 691.467875] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.467875] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-572d68e0-1a33-461b-b412-216ba70b09bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.483771] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507784, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.485873] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 691.485873] env[69475]: value = "task-3507785" [ 691.485873] env[69475]: _type = "Task" [ 691.485873] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.497035] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507785, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.574766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Releasing lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.575237] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Instance network_info: |[{"id": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "address": "fa:16:3e:db:fd:f5", "network": {"id": "94f8d4e2-45bb-44e4-a68f-42c66bc34a56", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-444583420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25294f782189432c852adf8bd89f363a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fc8b83-70", "ovs_interfaceid": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 691.575585] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Acquired lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.575845] env[69475]: DEBUG nova.network.neutron [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Refreshing network info cache for port c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.577274] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:fd:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'fd77ecbc-aaaf-45f4-ae8f-977d90e4052f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1fc8b83-7009-439e-b6cd-c8b86d680b84', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 691.586259] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 
tempest-ImagesOneServerTestJSON-1208980985-project-member] Creating folder: Project (25294f782189432c852adf8bd89f363a). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.587471] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-05c49d0a-bd15-43eb-9872-13a69090e0a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.601478] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Created folder: Project (25294f782189432c852adf8bd89f363a) in parent group-v700823. [ 691.601650] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Creating folder: Instances. Parent ref: group-v700915. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 691.601910] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31152526-3b6d-47ba-b63a-067977de81f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.611256] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Successfully updated port: a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 691.614080] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Created folder: Instances in parent group-v700915. [ 691.614080] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 691.614080] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 691.614297] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c196d523-8f2d-43e7-b8c0-9a065628506e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.631741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.631938] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquired lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.632149] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 691.639335] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 691.639335] env[69475]: value = "task-3507788" [ 691.639335] env[69475]: _type = "Task" [ 691.639335] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.648991] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507788, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.921778] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6a5db787-58a8-4c7f-95cd-e9486a7e642d tempest-ServerAddressesNegativeTestJSON-1763061698 tempest-ServerAddressesNegativeTestJSON-1763061698-project-member] Lock "3c253a57-1c93-4e8d-aaa1-1331c0547d85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.368s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.940221] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507783, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074335} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.940515] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 691.941471] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9262f76f-176d-424f-8b6f-8d46c55bc322 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.965620] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 691.966466] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-55b809be-6b59-488a-a935-5173f424e017 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.996035] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 691.996035] env[69475]: value = "task-3507789" [ 691.996035] env[69475]: _type = "Task" [ 691.996035] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.996035] env[69475]: DEBUG oslo_vmware.api [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Task: {'id': task-3507784, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.097487} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.998884] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 691.999100] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 691.999292] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 691.999466] env[69475]: INFO nova.compute.manager [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Took 1.14 seconds to destroy the instance on the hypervisor. [ 691.999700] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 691.999962] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072764} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.003213] env[69475]: DEBUG nova.compute.manager [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 692.003319] env[69475]: DEBUG nova.network.neutron [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 692.007692] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 692.008643] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a818729-1b99-4a6f-9457-cddf46132ea1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.018148] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507789, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.039222] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Reconfiguring VM instance instance-0000001d to attach disk [datastore2] e48e2cc1-7d60-457f-8f1c-649f0dda8cdb/e48e2cc1-7d60-457f-8f1c-649f0dda8cdb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 692.040224] env[69475]: DEBUG nova.network.neutron [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.041691] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c5de196-c3d1-4fe7-a63c-f0611f447339 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.066029] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 692.066029] env[69475]: value = "task-3507790" [ 692.066029] env[69475]: _type = "Task" [ 692.066029] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.074735] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507790, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.150686] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507788, 'name': CreateVM_Task, 'duration_secs': 0.418061} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.150903] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 692.151687] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.151929] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.152313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 692.152635] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-952bc7cd-3213-44d9-83d6-6a579be43725 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.161654] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 692.161654] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fc3410-73d4-97f9-464b-160a3422e37c" [ 692.161654] env[69475]: _type = "Task" [ 692.161654] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.170676] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fc3410-73d4-97f9-464b-160a3422e37c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.195800] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 692.241519] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 692.241844] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50da06e7-fa36-4a73-aec9-93c12a8c0ec2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.252204] env[69475]: DEBUG oslo_vmware.api [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 692.252204] env[69475]: value = "task-3507791" [ 692.252204] env[69475]: _type = "Task" [ 692.252204] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.261939] env[69475]: DEBUG oslo_vmware.api [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507791, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.301113] env[69475]: DEBUG nova.network.neutron [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Updated VIF entry in instance network info cache for port c1fc8b83-7009-439e-b6cd-c8b86d680b84. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.301520] env[69475]: DEBUG nova.network.neutron [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Updating instance_info_cache with network_info: [{"id": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "address": "fa:16:3e:db:fd:f5", "network": {"id": "94f8d4e2-45bb-44e4-a68f-42c66bc34a56", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-444583420-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "25294f782189432c852adf8bd89f363a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "fd77ecbc-aaaf-45f4-ae8f-977d90e4052f", "external-id": "nsx-vlan-transportzone-171", "segmentation_id": 171, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1fc8b83-70", "ovs_interfaceid": "c1fc8b83-7009-439e-b6cd-c8b86d680b84", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.486114] env[69475]: DEBUG nova.network.neutron [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updating instance_info_cache with network_info: [{"id": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "address": "fa:16:3e:d8:a4:fb", "network": {"id": "ab157278-7d5e-44e2-bcb7-39ebeeeaa108", "bridge": "br-int", "label": "tempest-ServersTestJSON-1740511078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "694f14f9b2e64d769ca5ced4d71110c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b5cd8c-fa", "ovs_interfaceid": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.512882] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507789, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.559268] env[69475]: DEBUG nova.network.neutron [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.576519] env[69475]: DEBUG nova.compute.manager [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Received event network-vif-plugged-a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.576765] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Acquiring lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.576948] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.577146] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.577293] env[69475]: DEBUG nova.compute.manager [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] No waiting events found dispatching network-vif-plugged-a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 692.577454] env[69475]: WARNING nova.compute.manager [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Received unexpected event network-vif-plugged-a4b5cd8c-fa71-4d57-ba30-262f752f04db for instance with vm_state building and task_state spawning. [ 692.577607] env[69475]: DEBUG nova.compute.manager [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Received event network-changed-a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 692.577755] env[69475]: DEBUG nova.compute.manager [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Refreshing instance network info cache due to event network-changed-a4b5cd8c-fa71-4d57-ba30-262f752f04db. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 692.577909] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Acquiring lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.581689] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507790, 'name': ReconfigVM_Task, 'duration_secs': 0.32591} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.583059] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Reconfigured VM instance instance-0000001d to attach disk [datastore2] e48e2cc1-7d60-457f-8f1c-649f0dda8cdb/e48e2cc1-7d60-457f-8f1c-649f0dda8cdb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 692.583059] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ab9deb8-4f37-4d0b-af9e-cb75cb689095 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.591272] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 692.591272] env[69475]: value = "task-3507792" [ 692.591272] env[69475]: _type = "Task" [ 692.591272] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.602807] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507792, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.674782] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fc3410-73d4-97f9-464b-160a3422e37c, 'name': SearchDatastore_Task, 'duration_secs': 0.018462} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.675247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.675507] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 692.675836] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.676012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.676284] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 692.676589] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e16b800a-17bc-42b5-8157-2f0e5d09ab7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.690992] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 692.691691] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 692.694888] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30dc851c-af92-4888-9bd6-918cce39f4ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.706827] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 692.706827] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52025644-0f5e-934e-1dc4-bcf168617932" [ 692.706827] env[69475]: _type = "Task" [ 692.706827] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.717613] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52025644-0f5e-934e-1dc4-bcf168617932, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.763233] env[69475]: DEBUG oslo_vmware.api [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507791, 'name': PowerOffVM_Task, 'duration_secs': 0.227505} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.765968] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 692.766186] env[69475]: DEBUG nova.compute.manager [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.767128] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66475389-d871-4277-b6af-f42fc91eed34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.804242] env[69475]: DEBUG oslo_concurrency.lockutils [req-070623a0-62fb-4c2b-b386-18c9643dbc2d req-6e4bbad7-7af2-4558-b1ca-314c26daed9e service nova] Releasing lock "refresh_cache-4c2e12bf-3f16-47de-a604-44b62a6c7137" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.906423] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30c7815-6218-4cbe-95e6-9566c12bac69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.917246] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39af993-1dbe-4fca-ba5f-1dc661bb1480 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.964065] 
env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dba5aa9-6a74-4749-a901-a6f1c3000aa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.973579] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d93dd2-4f9a-4f11-a0fb-32fa2095e867 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.989919] env[69475]: DEBUG nova.compute.provider_tree [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.990614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Releasing lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.994019] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Instance network_info: |[{"id": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "address": "fa:16:3e:d8:a4:fb", "network": {"id": "ab157278-7d5e-44e2-bcb7-39ebeeeaa108", "bridge": "br-int", "label": "tempest-ServersTestJSON-1740511078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "694f14f9b2e64d769ca5ced4d71110c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b5cd8c-fa", "ovs_interfaceid": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 692.994019] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Acquired lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.994206] env[69475]: DEBUG nova.network.neutron [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Refreshing network info cache for port a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 692.994206] env[69475]: DEBUG nova.virt.vmwareapi.vmops 
[None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:a4:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f67a2790-f2b0-4d03-b606-0bfaee7a4229', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a4b5cd8c-fa71-4d57-ba30-262f752f04db', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 693.000132] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Creating folder: Project (694f14f9b2e64d769ca5ced4d71110c4). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.000641] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e3404fc-b738-4e2c-9929-6e7287e41a61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.016175] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507789, 'name': ReconfigVM_Task, 'duration_secs': 0.541223} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.016175] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Reconfigured VM instance instance-00000019 to attach disk [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3/3149cd80-503c-42e4-ac91-54aababe84e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 693.016175] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb282a31-cabc-41a0-96d0-037b8a3b745f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.019528] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Created folder: Project (694f14f9b2e64d769ca5ced4d71110c4) in parent group-v700823. [ 693.019718] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Creating folder: Instances. Parent ref: group-v700918. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 693.022020] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9672fca0-76d1-4177-a4f9-b409f4b216f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.022884] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 693.022884] env[69475]: value = "task-3507794" [ 693.022884] env[69475]: _type = "Task" [ 693.022884] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.032037] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507794, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.035102] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Created folder: Instances in parent group-v700918. [ 693.035102] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 693.035102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 693.035102] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-429d779a-28e0-439f-bc63-2bf231bafc06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.058824] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 693.058824] env[69475]: value = "task-3507796" [ 693.058824] env[69475]: _type = "Task" [ 693.058824] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.062253] env[69475]: INFO nova.compute.manager [-] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Took 1.06 seconds to deallocate network for instance. [ 693.072021] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507796, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.100964] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507792, 'name': Rename_Task, 'duration_secs': 0.152146} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.101929] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.102188] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9feab94f-b061-4148-bc06-0ee971c8866a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.109724] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 693.109724] env[69475]: value = "task-3507797" [ 693.109724] env[69475]: _type = "Task" [ 693.109724] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.118886] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507797, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.219200] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52025644-0f5e-934e-1dc4-bcf168617932, 'name': SearchDatastore_Task, 'duration_secs': 0.02099} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.220038] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b301d07-0ed1-4815-9df1-eef054634417 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.226358] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 693.226358] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52035df4-69ec-c59c-c58d-39455f3f2de5" [ 693.226358] env[69475]: _type = "Task" [ 693.226358] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.235751] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52035df4-69ec-c59c-c58d-39455f3f2de5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.282112] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fb63c557-7e30-4ebb-9d32-b0f2c9585482 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.062s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.493207] env[69475]: DEBUG nova.scheduler.client.report [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.538947] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507794, 'name': Rename_Task, 'duration_secs': 0.168779} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.539743] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 693.539867] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c928ff3e-389e-407e-9143-fb091000992f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.548509] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Waiting for the task: (returnval){ [ 693.548509] env[69475]: value = "task-3507798" [ 693.548509] env[69475]: _type = "Task" [ 693.548509] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.557797] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507798, 'name': PowerOnVM_Task} progress is 0%. 
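For the inventory report above, placement's usable capacity is commonly described as (total - reserved) * allocation_ratio; a quick back-of-the-envelope check of those numbers (the formula is stated here as an assumption, not quoted from the Nova/placement source):

```python
# Rough capacity check against the inventory reported above. The
# (total - reserved) * allocation_ratio formula is an assumption here.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```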
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.571849] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.579102] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507796, 'name': CreateVM_Task, 'duration_secs': 0.344305} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.579102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 693.579766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.580419] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 693.580419] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 693.580708] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c36c513-7df5-4277-ac5f-fcdb508fdc99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.586663] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 693.586663] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529a3daa-9b3e-e569-f4a1-827d0187d834" [ 693.586663] env[69475]: _type = "Task" [ 693.586663] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.596185] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529a3daa-9b3e-e569-f4a1-827d0187d834, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.626526] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507797, 'name': PowerOnVM_Task} progress is 79%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.738236] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52035df4-69ec-c59c-c58d-39455f3f2de5, 'name': SearchDatastore_Task, 'duration_secs': 0.013712} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.738516] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.738793] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4c2e12bf-3f16-47de-a604-44b62a6c7137/4c2e12bf-3f16-47de-a604-44b62a6c7137.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.739069] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd891f9d-acae-47f4-bda2-007786392fb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.747744] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 693.747744] env[69475]: value = "task-3507799" [ 693.747744] env[69475]: _type = "Task" [ 693.747744] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.762463] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507799, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.796867] env[69475]: DEBUG nova.network.neutron [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updated VIF entry in instance network info cache for port a4b5cd8c-fa71-4d57-ba30-262f752f04db. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 693.796867] env[69475]: DEBUG nova.network.neutron [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updating instance_info_cache with network_info: [{"id": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "address": "fa:16:3e:d8:a4:fb", "network": {"id": "ab157278-7d5e-44e2-bcb7-39ebeeeaa108", "bridge": "br-int", "label": "tempest-ServersTestJSON-1740511078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "694f14f9b2e64d769ca5ced4d71110c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b5cd8c-fa", "ovs_interfaceid": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.006351] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.007450] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Start building networks asynchronously for instance. 
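The network_info blob cached in the entry above carries everything the VIF plug needs. A small helper, assuming the blob has already been parsed into Python structures exactly as printed, to pull out the commonly needed fields:

```python
# Extracts the useful bits from the cached network_info shown above.
# `network_info` is assumed to be the parsed list from that log entry;
# the key layout matches what the cache update prints, nothing more.
def summarize_vif(network_info):
    vif = network_info[0]
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    return {
        "port_id": vif["id"],                                   # a4b5cd8c-fa71-...
        "mac": vif["address"],                                  # fa:16:3e:d8:a4:fb
        "fixed_ips": fixed_ips,                                 # ['192.168.128.12']
        "logical_switch": vif["details"]["nsx-logical-switch-id"],
        "devname": vif["devname"],                              # tapa4b5cd8c-fa
    }
```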
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 694.012524] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 25.026s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.013222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.013429] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 694.013799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.947s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.015548] env[69475]: INFO nova.compute.claims [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.020936] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931f97e8-175c-41dc-9ebe-128f57d56827 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.031974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0375ff-f7e2-4652-beda-20fd3b81d001 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.066028] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5ead70-c930-449e-8e15-38b06e0f3f58 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.079835] env[69475]: DEBUG oslo_vmware.api [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Task: {'id': task-3507798, 'name': PowerOnVM_Task, 'duration_secs': 0.478249} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.080956] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260a461b-085f-4928-aa97-5b2bf4799764 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.085054] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.085256] env[69475]: DEBUG nova.compute.manager [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.086080] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30686d5-e898-480e-ab2a-206c81de2de7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.124078] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179948MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 694.124326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.136436] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529a3daa-9b3e-e569-f4a1-827d0187d834, 'name': SearchDatastore_Task, 'duration_secs': 0.011689} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.137476] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.137740] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 694.137988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.138151] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.138333] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 694.138611] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd0217e9-1417-4e35-a29d-88c0029a8fd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.144872] env[69475]: DEBUG oslo_vmware.api [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507797, 'name': PowerOnVM_Task, 'duration_secs': 0.699633} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.145153] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 694.145379] env[69475]: INFO nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Took 8.00 seconds to spawn the instance on the hypervisor. 
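Nearly every step above follows the same invoke-then-poll shape: a *_Task method is invoked through the session, then the returned task is polled until it completes (the "progress is N%" lines). A minimal sketch with oslo.vmware, assuming an already-established VMwareAPISession like the one created at startup; the real Nova helpers add error translation and per-instance logging on top:

```python
# Sketch of the invoke-then-wait pattern behind Rename_Task / PowerOnVM_Task.
# `session` is assumed to be an existing oslo_vmware.api.VMwareAPISession;
# only invoke_api() and wait_for_task() are used here.
def power_on_vm(session, vm_ref):
    # Starts PowerOnVM_Task on the vCenter side and returns a task reference ...
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    # ... which wait_for_task polls until vCenter reports success,
    # raising if the task ends in an error state.
    session.wait_for_task(task)
```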
[ 694.145555] env[69475]: DEBUG nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.146382] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0394da34-9106-4f4a-9b1f-077876725395 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.160840] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 694.161120] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 694.166174] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-176c776e-4b90-47b9-acc5-8adbd6cce106 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.174845] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 694.174845] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520109cb-0ec6-9e3b-9ade-c78215ddeef1" [ 694.174845] env[69475]: _type = "Task" [ 694.174845] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.189628] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520109cb-0ec6-9e3b-9ade-c78215ddeef1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.263472] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507799, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.505033} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.263866] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4c2e12bf-3f16-47de-a604-44b62a6c7137/4c2e12bf-3f16-47de-a604-44b62a6c7137.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.263967] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.264277] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9789b3c7-f7a0-42c1-af57-27eeb4076ef0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.274882] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 694.274882] env[69475]: value = "task-3507800" [ 694.274882] env[69475]: _type = "Task" [ 694.274882] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.285649] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507800, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.304911] env[69475]: DEBUG oslo_concurrency.lockutils [req-56116829-e073-4243-b47b-2728b09c509b req-88fe44d2-ab2c-43a8-84f7-91a20eb2146e service nova] Releasing lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.521337] env[69475]: DEBUG nova.compute.utils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 694.522767] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 694.522807] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 694.607921] env[69475]: DEBUG nova.policy [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd285f0a8d8c048f98cb85755233c5736', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '55bcc8b22ecf422b860efc0ee731ca37', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 694.644298] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.667446] env[69475]: INFO nova.compute.manager [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Took 35.76 seconds to build instance. [ 694.688033] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520109cb-0ec6-9e3b-9ade-c78215ddeef1, 'name': SearchDatastore_Task, 'duration_secs': 0.056595} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.689093] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e97ebdac-bef8-44ad-86c3-f7d200279d6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.696666] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 694.696666] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528932c4-cd55-b758-9986-ab34e9eebad0" [ 694.696666] env[69475]: _type = "Task" [ 694.696666] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.707988] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528932c4-cd55-b758-9986-ab34e9eebad0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.787020] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507800, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106624} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.787020] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.787614] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b261b88-07d9-475d-ab69-63297e7d5992 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.815020] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Reconfiguring VM instance instance-0000001e to attach disk [datastore2] 4c2e12bf-3f16-47de-a604-44b62a6c7137/4c2e12bf-3f16-47de-a604-44b62a6c7137.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.815020] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-137a2787-752f-4e52-8c32-ae2dc60794c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.839731] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 694.839731] env[69475]: value = "task-3507801" [ 694.839731] env[69475]: _type = "Task" [ 694.839731] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.849708] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507801, 'name': ReconfigVM_Task} progress is 5%. 
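Tasks 3507799/3507800/3507801 above are the three disk-preparation steps of this spawn path in order: copy the cached VMDK to the instance directory, extend it to the flavor's root size (1048576 is presumably the new capacity in KB, i.e. a 1 GiB root disk), then reconfigure the VM to attach it. A sketch showing only that sequencing, with the three workers passed in as hypothetical callables standing in for the vm_util/volumeops helpers named in the log:

```python
# The order of the three disk tasks above, nothing more. copy_disk,
# extend_disk and attach_disk are hypothetical callables, not Nova APIs.
def prepare_root_disk(copy_disk, extend_disk, attach_disk,
                      datastore, image_id, instance_uuid, new_capacity_kb):
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    copy_disk(cached, target)             # CopyVirtualDisk_Task
    extend_disk(target, new_capacity_kb)  # ExtendVirtualDisk_Task
    attach_disk(instance_uuid, target)    # ReconfigVM_Task (attach as sparse disk)
```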
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.932197] env[69475]: DEBUG nova.compute.manager [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.933117] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a87179ec-c481-42d4-a4b0-13318f851938 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.003468] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Successfully created port: f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 695.005926] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "3149cd80-503c-42e4-ac91-54aababe84e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.006161] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.006354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "3149cd80-503c-42e4-ac91-54aababe84e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.006548] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.006720] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.008682] env[69475]: INFO nova.compute.manager [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 
tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Terminating instance [ 695.026909] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 695.169427] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9c6e9870-ba05-45ae-b05e-0f74b0a2e61e tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.514s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.211491] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528932c4-cd55-b758-9986-ab34e9eebad0, 'name': SearchDatastore_Task, 'duration_secs': 0.010581} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.214226] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.214540] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8fbabf86-be9e-47ec-8c4c-adea4c68abe8/8fbabf86-be9e-47ec-8c4c-adea4c68abe8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 695.215029] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3d0d02d-3326-41f8-b8af-8d21e235fdc6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.224608] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 695.224608] env[69475]: value = "task-3507802" [ 695.224608] env[69475]: _type = "Task" [ 695.224608] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.242595] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507802, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.353445] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507801, 'name': ReconfigVM_Task, 'duration_secs': 0.323733} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.356013] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Reconfigured VM instance instance-0000001e to attach disk [datastore2] 4c2e12bf-3f16-47de-a604-44b62a6c7137/4c2e12bf-3f16-47de-a604-44b62a6c7137.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.356978] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d0f87f92-43ca-4be6-a17b-791736a10257 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.365062] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 695.365062] env[69475]: value = "task-3507803" [ 695.365062] env[69475]: _type = "Task" [ 695.365062] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.381183] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507803, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.448800] env[69475]: INFO nova.compute.manager [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] instance snapshotting [ 695.448800] env[69475]: WARNING nova.compute.manager [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 695.451888] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5069f73-893d-4689-b0c4-ab2c6d869238 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.484750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec027eed-6f5d-4bb0-bf3f-c5438b95b7ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.517667] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "refresh_cache-3149cd80-503c-42e4-ac91-54aababe84e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.517667] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquired lock "refresh_cache-3149cd80-503c-42e4-ac91-54aababe84e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.517667] env[69475]: DEBUG nova.network.neutron [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 695.658754] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15314e0e-ce77-402e-bc75-5c18ee539c51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.668582] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ce8cc8b-7e56-4419-a3c6-4d9d674feabf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.672669] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 695.709207] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42361266-1318-4a46-a01d-150067c39fd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.717851] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbabc5bd-6671-4c90-93d8-ee47ac4b3bc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.734473] env[69475]: DEBUG nova.compute.provider_tree [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.745456] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48303} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.746478] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8fbabf86-be9e-47ec-8c4c-adea4c68abe8/8fbabf86-be9e-47ec-8c4c-adea4c68abe8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 695.746734] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 695.746947] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2edf2ded-de4a-4c41-a374-50c254dfafff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.755654] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 695.755654] env[69475]: value = "task-3507804" [ 695.755654] env[69475]: _type = "Task" [ 695.755654] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.764188] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507804, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.875491] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507803, 'name': Rename_Task, 'duration_secs': 0.421975} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.875791] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.876044] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-140c25f7-4273-47b0-998a-2924c9d78a2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.882662] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 695.882662] env[69475]: value = "task-3507805" [ 695.882662] env[69475]: _type = "Task" [ 695.882662] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.890578] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.999989] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 696.000388] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4d49390e-f814-419c-9c43-6eb12356b054 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.008852] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 696.008852] env[69475]: value = "task-3507806" [ 696.008852] env[69475]: _type = "Task" [ 696.008852] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.017927] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507806, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.022603] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "interface-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.022855] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "interface-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.023323] env[69475]: DEBUG nova.objects.instance [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lazy-loading 'flavor' on Instance uuid e48e2cc1-7d60-457f-8f1c-649f0dda8cdb {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.039854] env[69475]: DEBUG nova.network.neutron [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 696.046472] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 696.068337] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 696.068761] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 696.068804] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 696.068976] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 696.069194] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 696.069354] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 696.069562] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 696.069718] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 696.069889] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 696.071216] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 696.071216] env[69475]: DEBUG nova.virt.hardware [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 696.071216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0c8bf1-c6c9-41d9-8b87-8e6b86701621 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.080124] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de5e2a4-daf5-4a9d-b63f-dd38de3ff950 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.096856] env[69475]: DEBUG nova.network.neutron [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.191334] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 696.237940] env[69475]: DEBUG nova.scheduler.client.report [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 696.266823] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507804, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07482} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.267135] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 696.267927] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619c0c24-5cfe-4c96-b83a-27d3d817c64f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.293409] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Reconfiguring VM instance instance-0000001f to attach disk [datastore2] 8fbabf86-be9e-47ec-8c4c-adea4c68abe8/8fbabf86-be9e-47ec-8c4c-adea4c68abe8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 696.293687] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc4c46e0-d54c-4f5a-a4a0-dd6560df28ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.313788] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 696.313788] env[69475]: value = "task-3507807" [ 696.313788] env[69475]: _type = "Task" [ 696.313788] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.326835] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507807, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.393900] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507805, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.519070] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507806, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.527928] env[69475]: DEBUG nova.objects.instance [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lazy-loading 'pci_requests' on Instance uuid e48e2cc1-7d60-457f-8f1c-649f0dda8cdb {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.599162] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Releasing lock "refresh_cache-3149cd80-503c-42e4-ac91-54aababe84e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.599611] env[69475]: DEBUG nova.compute.manager [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 696.599888] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 696.600976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c325c4a2-aeec-45de-8899-09b2a67599d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.611345] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 696.611735] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd82df35-3841-414d-97f1-56d0814f5429 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.620121] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 696.620121] env[69475]: value = "task-3507808" [ 696.620121] env[69475]: _type = "Task" [ 696.620121] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.630187] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507808, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.743633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.744294] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 696.747073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.397s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 696.747866] env[69475]: DEBUG nova.objects.instance [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lazy-loading 'resources' on Instance uuid 77a5665d-b00f-42c2-a1e8-319dfd232b06 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 696.825539] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507807, 'name': ReconfigVM_Task, 'duration_secs': 0.415562} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.825884] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Reconfigured VM instance instance-0000001f to attach disk [datastore2] 8fbabf86-be9e-47ec-8c4c-adea4c68abe8/8fbabf86-be9e-47ec-8c4c-adea4c68abe8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 696.826601] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1621400a-b75c-4219-a28e-92b88717ac2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.835420] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 696.835420] env[69475]: value = "task-3507809" [ 696.835420] env[69475]: _type = "Task" [ 696.835420] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.845131] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507809, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.894302] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507805, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.994065] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Successfully updated port: f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 697.020203] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507806, 'name': CreateSnapshot_Task, 'duration_secs': 0.827849} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.020512] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 697.021331] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42eab7b-d296-4920-b4e5-a639c35da124 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.031851] env[69475]: DEBUG nova.objects.base [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 697.032066] env[69475]: DEBUG nova.network.neutron [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 697.119126] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4427c816-1a3d-4e3d-aa1f-273756ef0a02 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "interface-e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.096s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.130970] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 
tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507808, 'name': PowerOffVM_Task, 'duration_secs': 0.136153} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.131244] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 697.131426] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 697.131673] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebdc7b42-0641-4d51-a2da-86c5baa465d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.161136] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 697.161407] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 697.161635] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Deleting the datastore file [datastore2] 3149cd80-503c-42e4-ac91-54aababe84e3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 697.161940] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2c7ebd7-990e-4895-87b1-da8ff8d2e9b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.169568] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for the task: (returnval){ [ 697.169568] env[69475]: value = "task-3507811" [ 697.169568] env[69475]: _type = "Task" [ 697.169568] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.178573] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.253813] env[69475]: DEBUG nova.compute.utils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 697.255119] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 697.255952] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 697.292922] env[69475]: DEBUG nova.policy [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82f6c3724a2b4430b8df87655ff91c63', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1073981d0d7740e78805798e02ff9d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 697.346816] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507809, 'name': Rename_Task, 'duration_secs': 0.157995} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.349689] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 697.350167] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45824267-89c9-4bf9-8908-f8b83d043e52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.356857] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 697.356857] env[69475]: value = "task-3507812" [ 697.356857] env[69475]: _type = "Task" [ 697.356857] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.367740] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507812, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.398533] env[69475]: DEBUG oslo_vmware.api [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507805, 'name': PowerOnVM_Task, 'duration_secs': 1.407291} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.399244] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 697.399463] env[69475]: INFO nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 8.49 seconds to spawn the instance on the hypervisor. [ 697.399646] env[69475]: DEBUG nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 697.400471] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b36b30-3915-4bde-ace2-ba68c6e7922a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.468996] env[69475]: DEBUG nova.compute.manager [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Received event network-vif-plugged-f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.469247] env[69475]: DEBUG oslo_concurrency.lockutils [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] Acquiring lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.469449] env[69475]: DEBUG oslo_concurrency.lockutils [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.469628] env[69475]: DEBUG oslo_concurrency.lockutils [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.469768] env[69475]: DEBUG nova.compute.manager [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] No waiting events found dispatching network-vif-plugged-f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 697.469917] env[69475]: WARNING nova.compute.manager [req-749a68b7-fc9a-4bb7-bcc4-0a6a9be59a86 req-ddbc68d7-33fa-4be9-a47e-efe085765854 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Received unexpected event network-vif-plugged-f4c64f28-ecc9-429e-b7b1-363190aba0f8 for instance with vm_state building and task_state spawning. [ 697.496818] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.496979] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquired lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 697.497152] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 697.546705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 697.548277] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-492e6e6f-f5d4-4f34-9399-deaec7fb9672 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.560878] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 697.560878] env[69475]: value = "task-3507813" [ 697.560878] env[69475]: _type = "Task" [ 697.560878] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.570100] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507813, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.685893] env[69475]: DEBUG oslo_vmware.api [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Task: {'id': task-3507811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177364} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.686736] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Successfully created port: 2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 697.688605] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.688800] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.689079] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.689192] env[69475]: INFO nova.compute.manager [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Took 1.09 seconds to destroy the instance on the hypervisor. [ 697.689448] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.689647] env[69475]: DEBUG nova.compute.manager [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.689739] env[69475]: DEBUG nova.network.neutron [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.714570] env[69475]: DEBUG nova.network.neutron [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 697.760855] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 697.811280] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65b92a7a-c311-4a2a-9079-19a1f8d2fa3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.822389] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a0505b-4ef6-4952-b7a1-2609babb5c96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.855370] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b8334e-d8af-40ca-8de4-d1d7992e8c2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.870110] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5751e88c-6510-48de-918e-e90ebeadb70a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.874479] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507812, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.885541] env[69475]: DEBUG nova.compute.provider_tree [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 697.921820] env[69475]: INFO nova.compute.manager [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 38.47 seconds to build instance. [ 698.032706] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 698.075139] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507813, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.180210] env[69475]: DEBUG nova.network.neutron [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Updating instance_info_cache with network_info: [{"id": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "address": "fa:16:3e:48:3a:5b", "network": {"id": "bfcc842c-edb1-46a8-96c1-687e65c81f08", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-947348463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcc8b22ecf422b860efc0ee731ca37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4c64f28-ec", "ovs_interfaceid": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.217335] env[69475]: DEBUG nova.network.neutron [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.370253] env[69475]: DEBUG oslo_vmware.api [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507812, 'name': PowerOnVM_Task, 'duration_secs': 0.74573} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.370501] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 698.370718] env[69475]: INFO nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Took 7.15 seconds to spawn the instance on the hypervisor. 
[ 698.370892] env[69475]: DEBUG nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 698.371643] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48bd5a26-b446-434e-96e2-ccd54f4a50e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.390769] env[69475]: DEBUG nova.scheduler.client.report [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 698.424514] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b9fa4320-9d1c-4e2a-a9c5-d7132d2e6012 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.952s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.575759] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507813, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.682762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Releasing lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 698.683123] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Instance network_info: |[{"id": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "address": "fa:16:3e:48:3a:5b", "network": {"id": "bfcc842c-edb1-46a8-96c1-687e65c81f08", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-947348463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcc8b22ecf422b860efc0ee731ca37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4c64f28-ec", "ovs_interfaceid": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 698.683536] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:48:3a:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6fb0104-186b-4288-b87e-634893f46f01', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4c64f28-ecc9-429e-b7b1-363190aba0f8', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 698.691248] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Creating folder: Project (55bcc8b22ecf422b860efc0ee731ca37). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.691639] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6802647f-cc76-47ae-8f62-5f59e1ec325b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.704558] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Created folder: Project (55bcc8b22ecf422b860efc0ee731ca37) in parent group-v700823. [ 698.704762] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Creating folder: Instances. Parent ref: group-v700923. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 698.704996] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ebf7f332-af81-448b-a939-c605ba977b9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.715397] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Created folder: Instances in parent group-v700923. [ 698.715397] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 698.715604] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 698.715809] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2db34947-b93a-442a-b8ee-526c0df6ddf1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.730641] env[69475]: INFO nova.compute.manager [-] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Took 1.04 seconds to deallocate network for instance. [ 698.738812] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 698.738812] env[69475]: value = "task-3507816" [ 698.738812] env[69475]: _type = "Task" [ 698.738812] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.748156] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507816, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.770485] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 698.800384] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 698.800629] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 698.800811] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 698.800999] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 698.801165] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 698.801306] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 698.801510] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 698.801662] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 698.801834] env[69475]: DEBUG 
nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 698.801989] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 698.802171] env[69475]: DEBUG nova.virt.hardware [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 698.803333] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf838d5f-b68f-43b1-bf14-d44513256a7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.813267] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352adf71-0f48-4c1f-b564-14143939bcd1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.890031] env[69475]: INFO nova.compute.manager [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Took 34.00 seconds to build instance. 
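Note: the CreateVM_Task, CloneVM_Task, PowerOffVM_Task and SearchDatastore_Task entries around this point all follow one oslo.vmware pattern: a SOAP invocation returns a Task managed-object reference, and VMwareAPISession.wait_for_task() polls it, producing the "Waiting for the task ... to complete" and "Task: {...} progress is N%" lines. The sketch below is illustrative only and is not taken from this log; the vCenter host, credentials and managed-object references are placeholders.

# Minimal sketch of the task-wait pattern, assuming placeholder inputs.
from oslo_vmware import api as vmware_api

def create_vm_and_wait(session, vm_folder, config_spec, res_pool):
    """Invoke Folder.CreateVM_Task and block until vCenter reports completion."""
    # invoke_api() issues the SOAP call and returns a Task moref.
    task_ref = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                                  config=config_spec, pool=res_pool)
    # wait_for_task() polls the task, emitting the periodic
    # "progress is N%" DEBUG lines until success or error.
    return session.wait_for_task(task_ref)

# Placeholder session setup (host/user/password are not values from this log).
session = vmware_api.VMwareAPISession(
    'vcenter.example.org',
    'administrator@vsphere.local',
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5)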
[ 698.896040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.899206] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.049s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.899312] env[69475]: DEBUG nova.objects.instance [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lazy-loading 'resources' on Instance uuid c3db35f4-f43d-464c-9556-18a90866ee6a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 698.923384] env[69475]: INFO nova.scheduler.client.report [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Deleted allocations for instance 77a5665d-b00f-42c2-a1e8-319dfd232b06 [ 698.928120] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 699.074180] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507813, 'name': CloneVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.238059] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.250107] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507816, 'name': CreateVM_Task, 'duration_secs': 0.382927} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.250420] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 699.251129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.251419] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.251723] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 699.252051] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10ab3dee-98ec-402e-a5c6-5655b95ff1f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.258937] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 699.258937] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee4902-ca8b-042f-0ddb-d8c9e33df8d2" [ 699.258937] env[69475]: _type = "Task" [ 699.258937] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.272400] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee4902-ca8b-042f-0ddb-d8c9e33df8d2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.374028] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Successfully updated port: 2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 699.393293] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9a7a28ab-c7e7-4345-a2ac-3b803be2f88f tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.335s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.437113] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8622ffb8-829c-40a0-b340-44e09fab1dba tempest-DeleteServersAdminTestJSON-2078656816 tempest-DeleteServersAdminTestJSON-2078656816-project-member] Lock "77a5665d-b00f-42c2-a1e8-319dfd232b06" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.631s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.456479] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.575617] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507813, 'name': CloneVM_Task, 'duration_secs': 1.609078} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.575960] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Created linked-clone VM from snapshot [ 699.576931] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75d2d824-e42d-4753-b9dd-76cd2f9f62f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.593322] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Uploading image 63f82912-4988-46a0-a481-1ebf5f9703db {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 699.622731] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 699.622731] env[69475]: value = "vm-700922" [ 699.622731] env[69475]: _type = "VirtualMachine" [ 699.622731] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 699.623039] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f9517562-f9df-4dda-9e12-a63f8b668550 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.634859] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease: (returnval){ [ 699.634859] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cc2520-c7d7-86a9-05c0-11057858b229" [ 699.634859] env[69475]: _type = "HttpNfcLease" [ 699.634859] env[69475]: } obtained for exporting VM: (result){ [ 699.634859] env[69475]: value = "vm-700922" [ 699.634859] env[69475]: _type = "VirtualMachine" [ 699.634859] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 699.634859] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the lease: (returnval){ [ 699.634859] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cc2520-c7d7-86a9-05c0-11057858b229" [ 699.634859] env[69475]: _type = "HttpNfcLease" [ 699.634859] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 699.648460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.648753] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.648970] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.649174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.649346] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.651250] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 699.651250] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cc2520-c7d7-86a9-05c0-11057858b229" [ 699.651250] env[69475]: _type = "HttpNfcLease" [ 699.651250] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 699.656224] env[69475]: INFO nova.compute.manager [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Terminating instance [ 699.692992] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Received event network-changed-f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.693359] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Refreshing instance network info cache due to event network-changed-f4c64f28-ecc9-429e-b7b1-363190aba0f8. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.694087] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Acquiring lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.694379] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Acquired lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.694651] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Refreshing network info cache for port f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.777883] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee4902-ca8b-042f-0ddb-d8c9e33df8d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011035} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.778174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 699.778407] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 699.778740] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.778914] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.779143] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 699.779412] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c635cb81-f8cb-4ed3-b03f-45a9ea04d4a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.789374] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 699.789585] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 699.790351] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a50c22f-30e6-4c4b-877f-7c8e9b80df8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.797260] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 699.797260] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a4416-d1db-6f10-a3cf-a92e64b3ba1f" [ 699.797260] env[69475]: _type = "Task" [ 699.797260] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.807369] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a4416-d1db-6f10-a3cf-a92e64b3ba1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.844879] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.845118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.845330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.845520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.845693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.847971] env[69475]: INFO nova.compute.manager [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Terminating instance [ 699.875541] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.875705] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.875846] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 699.895636] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 699.948533] env[69475]: DEBUG nova.compute.manager [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Received event network-changed-a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 699.948533] env[69475]: DEBUG nova.compute.manager [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Refreshing instance network info cache due to event network-changed-a4b5cd8c-fa71-4d57-ba30-262f752f04db. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 699.948533] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] Acquiring lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 699.948533] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] Acquired lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 699.948533] env[69475]: DEBUG nova.network.neutron [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Refreshing network info cache for port a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 699.961471] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d679926-649c-45ae-b0ff-460ef5b28aa9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.971138] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae00930b-2223-485c-8bed-a9190b23dc34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.003777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8b8b60-743a-485d-9fac-6651b2e63912 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.012147] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d2a05bf-1ab9-4ed7-a7c4-d09d8e2af7e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.029337] env[69475]: DEBUG nova.compute.provider_tree [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 700.144434] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 700.144434] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cc2520-c7d7-86a9-05c0-11057858b229" [ 700.144434] env[69475]: _type = "HttpNfcLease" [ 700.144434] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 700.146138] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 700.146138] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cc2520-c7d7-86a9-05c0-11057858b229" [ 700.146138] env[69475]: _type = "HttpNfcLease" [ 700.146138] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 700.147018] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ae3538-0303-4b9d-8a4b-8b7c7d2898df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.158121] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 700.158121] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 700.160408] env[69475]: DEBUG nova.compute.manager [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 700.161059] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.167420] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-778a89db-2345-4b38-b77a-b3db311be454 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.229822] env[69475]: DEBUG nova.compute.manager [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.232059] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22fa3967-ed7e-48a1-918a-ac6056d877ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.239493] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.240440] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6678c63-ac7b-4644-aa9e-66b25b26d4dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.250071] env[69475]: DEBUG oslo_vmware.api 
[None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 700.250071] env[69475]: value = "task-3507818" [ 700.250071] env[69475]: _type = "Task" [ 700.250071] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.260611] env[69475]: DEBUG oslo_vmware.api [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507818, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.276165] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-979653dc-b05f-4789-8fdb-b83d985e70fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.319956] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a4416-d1db-6f10-a3cf-a92e64b3ba1f, 'name': SearchDatastore_Task, 'duration_secs': 0.010583} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.328400] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d60d25-a5f8-46d7-b90f-d0422cbb2e5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.336291] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 700.336291] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d65dab-323e-1e5c-bc37-59afc2c5e372" [ 700.336291] env[69475]: _type = "Task" [ 700.336291] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.351659] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d65dab-323e-1e5c-bc37-59afc2c5e372, 'name': SearchDatastore_Task, 'duration_secs': 0.012605} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.354042] env[69475]: DEBUG nova.compute.manager [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 700.354344] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.354686] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 700.354943] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 6f530b86-2ed1-41db-929c-8a5dd61d931a/6f530b86-2ed1-41db-929c-8a5dd61d931a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 700.356196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95514b04-e6e8-47ae-96e3-116e316c4f9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.359426] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da1045a5-1f81-4181-b043-b44729a622d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.368778] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.369954] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00e08eaa-8b0a-43e6-b774-dbf9f6c90f37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.371702] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 700.371702] env[69475]: value = "task-3507819" [ 700.371702] env[69475]: _type = "Task" [ 700.371702] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.377193] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 700.377193] env[69475]: value = "task-3507820" [ 700.377193] env[69475]: _type = "Task" [ 700.377193] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.393462] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.435100] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 700.442212] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 700.534596] env[69475]: DEBUG nova.scheduler.client.report [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 700.640869] env[69475]: DEBUG nova.network.neutron [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Updating instance_info_cache with network_info: [{"id": "2cb85199-de39-4837-a34d-c8ae33659f9b", "address": "fa:16:3e:45:9d:23", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb85199-de", "ovs_interfaceid": "2cb85199-de39-4837-a34d-c8ae33659f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.749953] env[69475]: INFO nova.compute.manager [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] instance snapshotting [ 700.755351] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Updated VIF entry in instance network info cache for port f4c64f28-ecc9-429e-b7b1-363190aba0f8. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 700.756129] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Updating instance_info_cache with network_info: [{"id": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "address": "fa:16:3e:48:3a:5b", "network": {"id": "bfcc842c-edb1-46a8-96c1-687e65c81f08", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-947348463-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "55bcc8b22ecf422b860efc0ee731ca37", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6fb0104-186b-4288-b87e-634893f46f01", "external-id": "nsx-vlan-transportzone-73", "segmentation_id": 73, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4c64f28-ec", "ovs_interfaceid": "f4c64f28-ecc9-429e-b7b1-363190aba0f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.758605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea384528-530a-4511-87cb-fb038a3d8785 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.802867] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4122ad-872c-43f9-b71b-eb7499b54b78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.807237] env[69475]: DEBUG oslo_vmware.api [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507818, 'name': PowerOffVM_Task, 'duration_secs': 0.250334} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.807544] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.807718] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.808947] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e1011b26-c059-424b-9206-874c91b5e930 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.885023] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507819, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.889281] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 700.889281] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 700.889281] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Deleting the datastore file [datastore2] e48e2cc1-7d60-457f-8f1c-649f0dda8cdb {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.893406] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9f866877-87b7-4a02-9a3d-2d365cdcc463 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.895690] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507820, 'name': PowerOffVM_Task, 'duration_secs': 0.20197} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.895968] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.896271] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.899646] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f688d51-c267-415a-9d70-d17306a61760 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.905049] env[69475]: DEBUG oslo_vmware.api [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for the task: (returnval){ [ 700.905049] env[69475]: value = "task-3507822" [ 700.905049] env[69475]: _type = "Task" [ 700.905049] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.913828] env[69475]: DEBUG oslo_vmware.api [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507822, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.970849] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 700.970963] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 700.971201] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Deleting the datastore file [datastore2] b255f4d7-b177-4d6c-8a28-dcb5a179c1c0 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 700.971528] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0428a876-c464-4bea-8607-e4775da14d5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.978555] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for the task: (returnval){ [ 700.978555] env[69475]: value = "task-3507824" [ 700.978555] env[69475]: _type = "Task" [ 700.978555] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.987667] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507824, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.049252] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.150s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.052835] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.375s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.052835] env[69475]: DEBUG nova.objects.instance [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lazy-loading 'resources' on Instance uuid c078753c-48a6-490b-8d7d-b0832eced25e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.085214] env[69475]: INFO nova.scheduler.client.report [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleted allocations for instance c3db35f4-f43d-464c-9556-18a90866ee6a [ 701.122385] env[69475]: DEBUG nova.network.neutron [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updated VIF entry in instance network info cache for port a4b5cd8c-fa71-4d57-ba30-262f752f04db. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 701.122385] env[69475]: DEBUG nova.network.neutron [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updating instance_info_cache with network_info: [{"id": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "address": "fa:16:3e:d8:a4:fb", "network": {"id": "ab157278-7d5e-44e2-bcb7-39ebeeeaa108", "bridge": "br-int", "label": "tempest-ServersTestJSON-1740511078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.151", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "694f14f9b2e64d769ca5ced4d71110c4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f67a2790-f2b0-4d03-b606-0bfaee7a4229", "external-id": "nsx-vlan-transportzone-187", "segmentation_id": 187, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa4b5cd8c-fa", "ovs_interfaceid": "a4b5cd8c-fa71-4d57-ba30-262f752f04db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.143463] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.143942] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance network_info: |[{"id": "2cb85199-de39-4837-a34d-c8ae33659f9b", "address": "fa:16:3e:45:9d:23", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb85199-de", "ovs_interfaceid": "2cb85199-de39-4837-a34d-c8ae33659f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 701.144407] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:9d:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cb85199-de39-4837-a34d-c8ae33659f9b', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 701.153949] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.154917] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 701.155197] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71c3333f-21d4-4bfc-910d-66cc8c11f461 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.178330] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 701.178330] env[69475]: value = "task-3507825" [ 701.178330] env[69475]: _type = "Task" [ 701.178330] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.198227] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507825, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.267855] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Releasing lock "refresh_cache-6f530b86-2ed1-41db-929c-8a5dd61d931a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.268482] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Received event network-vif-plugged-2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.269144] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Acquiring lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.269837] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.270281] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.270694] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] No waiting events found dispatching network-vif-plugged-2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 701.271120] env[69475]: WARNING nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Received unexpected event network-vif-plugged-2cb85199-de39-4837-a34d-c8ae33659f9b for instance with vm_state building and task_state spawning. [ 701.271336] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Received event network-changed-2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 701.271617] env[69475]: DEBUG nova.compute.manager [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Refreshing instance network info cache due to event network-changed-2cb85199-de39-4837-a34d-c8ae33659f9b. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 701.271913] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Acquiring lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.272194] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Acquired lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 701.272434] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Refreshing network info cache for port 2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 701.323585] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 701.323975] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e8fb7983-b617-48f8-ba12-aee5c33a062b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.333999] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 701.333999] env[69475]: value = "task-3507826" [ 701.333999] env[69475]: _type = "Task" [ 701.333999] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.344089] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.388975] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507819, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527274} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.389455] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 6f530b86-2ed1-41db-929c-8a5dd61d931a/6f530b86-2ed1-41db-929c-8a5dd61d931a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 701.389846] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 701.390826] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c3c6454-d08a-4530-9469-483a3d6956cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.402193] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 701.402193] env[69475]: value = "task-3507827" [ 701.402193] env[69475]: _type = "Task" [ 701.402193] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.423548] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507827, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.434015] env[69475]: DEBUG oslo_vmware.api [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Task: {'id': task-3507822, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293138} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.434015] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.434015] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.434015] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.434261] env[69475]: INFO nova.compute.manager [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Took 1.27 seconds to destroy the instance on the hypervisor. [ 701.436150] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.436150] env[69475]: DEBUG nova.compute.manager [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 701.436150] env[69475]: DEBUG nova.network.neutron [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 701.490680] env[69475]: DEBUG oslo_vmware.api [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Task: {'id': task-3507824, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28353} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.491087] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.491419] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.491706] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.492117] env[69475]: INFO nova.compute.manager [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 701.492372] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.492636] env[69475]: DEBUG nova.compute.manager [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 701.492750] env[69475]: DEBUG nova.network.neutron [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 701.600124] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22a58558-79c8-40d5-ae58-86b2beb5462a tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "c3db35f4-f43d-464c-9556-18a90866ee6a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.396s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.625111] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea15c796-2e0d-4da7-8e64-e2d053e30fe8 req-35093caa-d940-47dc-a516-ae8599b9b92c service nova] Releasing lock "refresh_cache-8fbabf86-be9e-47ec-8c4c-adea4c68abe8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 701.696442] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507825, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.846294] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507826, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.922493] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507827, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.121068} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.925960] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 701.935228] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d082e9-7258-4e1a-bfc1-a950c89057ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.966084] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Reconfiguring VM instance instance-00000020 to attach disk [datastore1] 6f530b86-2ed1-41db-929c-8a5dd61d931a/6f530b86-2ed1-41db-929c-8a5dd61d931a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 701.969449] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe907b28-b8d7-429c-8f0f-ec0a48de38b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.997911] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 701.997911] env[69475]: value = "task-3507828" [ 701.997911] env[69475]: _type = "Task" [ 701.997911] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.010091] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507828, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.091413] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Updated VIF entry in instance network info cache for port 2cb85199-de39-4837-a34d-c8ae33659f9b. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 702.092085] env[69475]: DEBUG nova.network.neutron [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Updating instance_info_cache with network_info: [{"id": "2cb85199-de39-4837-a34d-c8ae33659f9b", "address": "fa:16:3e:45:9d:23", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2cb85199-de", "ovs_interfaceid": "2cb85199-de39-4837-a34d-c8ae33659f9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.126349] env[69475]: DEBUG nova.compute.manager [req-3f22ecf6-5887-4966-a55b-9196691dd020 req-c662f7c5-c6d9-4f65-b318-15b1f5867e23 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Received event network-vif-deleted-01154e4e-cf6b-4d07-92e9-8c4c58376888 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 702.127538] env[69475]: INFO nova.compute.manager [req-3f22ecf6-5887-4966-a55b-9196691dd020 req-c662f7c5-c6d9-4f65-b318-15b1f5867e23 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Neutron deleted interface 01154e4e-cf6b-4d07-92e9-8c4c58376888; detaching it from the instance and deleting it from the info cache [ 702.127538] env[69475]: DEBUG nova.network.neutron [req-3f22ecf6-5887-4966-a55b-9196691dd020 req-c662f7c5-c6d9-4f65-b318-15b1f5867e23 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.202035] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507825, 'name': CreateVM_Task, 'duration_secs': 0.554553} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.202434] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 702.203108] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.203345] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.203933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 702.204156] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dff1efc1-62cb-4448-919c-f37fa2683168 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.212365] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 702.212365] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5299ed9c-ae4c-d2e5-43db-966891c44a2d" [ 702.212365] env[69475]: _type = "Task" [ 702.212365] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.229755] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5299ed9c-ae4c-d2e5-43db-966891c44a2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.280142] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32f0bbd6-bed6-4df6-ad2c-c19ba8a2e72f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.291126] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eaa8b15-85cd-4177-a8aa-3f1566ce5c62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.329516] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1f3215-2ccd-4617-8546-81a6489ceb85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.346458] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507826, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.347809] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-062633f5-29f9-47a3-9d8d-70b8777a2572 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.364055] env[69475]: DEBUG nova.compute.provider_tree [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.508829] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507828, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.596027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "4465f156-09cc-4eba-90e4-be76f3010363" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.596027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.596027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "4465f156-09cc-4eba-90e4-be76f3010363-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.596027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 702.596264] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 702.599063] env[69475]: DEBUG oslo_concurrency.lockutils [req-bb8ff77f-4f85-4d8c-b8d9-22e54da5034e req-fa3feff9-70ff-4907-94bd-06f2816b58a6 service nova] Releasing lock "refresh_cache-ed12921f-9be8-474d-958e-79dd16b8116e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.599706] env[69475]: INFO nova.compute.manager [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Terminating instance [ 702.612492] env[69475]: DEBUG nova.network.neutron [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.629304] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0bfef41-0712-4ca4-a00d-a7394d29bae2 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.643183] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c54374-c20a-4c60-8728-852bfe5659e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.680403] env[69475]: DEBUG nova.compute.manager [req-3f22ecf6-5887-4966-a55b-9196691dd020 req-c662f7c5-c6d9-4f65-b318-15b1f5867e23 service nova] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Detach interface failed, port_id=01154e4e-cf6b-4d07-92e9-8c4c58376888, reason: Instance e48e2cc1-7d60-457f-8f1c-649f0dda8cdb could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 702.725192] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5299ed9c-ae4c-d2e5-43db-966891c44a2d, 'name': SearchDatastore_Task, 'duration_secs': 0.02004} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.725710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 702.726308] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 702.726654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.726896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 702.727290] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 702.727717] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ddaf2075-c11b-4bf0-a8c2-d0290de63fcc {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.744025] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 702.744025] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 702.744025] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c8a56a2-c72b-491f-b2b6-32e01c2a4aa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.753994] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 702.753994] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5280b2c8-3add-7e36-2105-a51525ee2df7" [ 702.753994] env[69475]: _type = "Task" [ 702.753994] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 702.765166] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5280b2c8-3add-7e36-2105-a51525ee2df7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 702.844708] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507826, 'name': CreateSnapshot_Task, 'duration_secs': 1.035991} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 702.844994] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 702.845845] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf427b1-a2f1-41eb-91c5-85a6eba707e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.867342] env[69475]: DEBUG nova.scheduler.client.report [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 702.954775] env[69475]: DEBUG nova.network.neutron [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.009706] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507828, 'name': ReconfigVM_Task, 'duration_secs': 0.806125} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.010717] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Reconfigured VM instance instance-00000020 to attach disk [datastore1] 6f530b86-2ed1-41db-929c-8a5dd61d931a/6f530b86-2ed1-41db-929c-8a5dd61d931a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 703.010717] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69c1d22a-e7cf-4704-8118-212589014655 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.019660] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 703.019660] env[69475]: value = "task-3507829" [ 703.019660] env[69475]: _type = "Task" [ 703.019660] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.029339] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507829, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.104283] env[69475]: DEBUG nova.compute.manager [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 703.104537] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.105482] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989bcef4-cfa2-49b5-b4aa-6b903f127ba4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.114610] env[69475]: INFO nova.compute.manager [-] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Took 1.68 seconds to deallocate network for instance. [ 703.114980] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.116727] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d544567e-d241-4b27-a873-9e336be4b4fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.126741] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for the task: (returnval){ [ 703.126741] env[69475]: value = "task-3507830" [ 703.126741] env[69475]: _type = "Task" [ 703.126741] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.135751] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507830, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.269246] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5280b2c8-3add-7e36-2105-a51525ee2df7, 'name': SearchDatastore_Task, 'duration_secs': 0.018311} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.270399] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2960a46d-bc91-4814-85df-85e604c036cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.277768] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 703.277768] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521f3ace-34df-2e01-a482-9688597ed54a" [ 703.277768] env[69475]: _type = "Task" [ 703.277768] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.289276] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521f3ace-34df-2e01-a482-9688597ed54a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.364806] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 703.365187] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8911bc02-6be6-4068-8ca4-057427525bcc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.372464] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.320s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.375958] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.677s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.376216] env[69475]: DEBUG nova.objects.instance [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lazy-loading 'resources' on Instance uuid 3eda17da-111c-412d-9af4-d3a40b7d8faa {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 703.377565] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 703.377565] env[69475]: value = "task-3507831" [ 703.377565] env[69475]: _type = "Task" [ 703.377565] 
env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.389211] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507831, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.397547] env[69475]: INFO nova.scheduler.client.report [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted allocations for instance c078753c-48a6-490b-8d7d-b0832eced25e [ 703.457030] env[69475]: INFO nova.compute.manager [-] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Took 1.96 seconds to deallocate network for instance. [ 703.530188] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507829, 'name': Rename_Task, 'duration_secs': 0.302547} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.530483] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 703.530738] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80935709-7c19-4330-8bb1-5cfe7e4962ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.541847] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 703.541847] env[69475]: value = "task-3507832" [ 703.541847] env[69475]: _type = "Task" [ 703.541847] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.551492] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507832, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.623516] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.639210] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507830, 'name': PowerOffVM_Task, 'duration_secs': 0.224108} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.639210] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 703.639210] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 703.639403] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a7312a61-7468-4f76-a38e-85d56804c5c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.715591] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 703.715591] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 703.715591] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleting the datastore file [datastore1] 4465f156-09cc-4eba-90e4-be76f3010363 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 703.715874] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67486a37-11ee-438a-aeb4-0f0eb87e384d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.727342] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting 
for the task: (returnval){ [ 703.727342] env[69475]: value = "task-3507834" [ 703.727342] env[69475]: _type = "Task" [ 703.727342] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.737653] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507834, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.770123] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.770366] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.791244] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521f3ace-34df-2e01-a482-9688597ed54a, 'name': SearchDatastore_Task, 'duration_secs': 0.0138} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 703.791642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 703.791910] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 703.792423] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56ca1520-f135-4be7-8b25-c27b992fa295 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.800930] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 703.800930] env[69475]: value = "task-3507835" [ 703.800930] env[69475]: _type = "Task" [ 703.800930] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.812189] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.891562] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507831, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.904829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-38b902b3-3c48-497c-8104-af8731b3bb84 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "c078753c-48a6-490b-8d7d-b0832eced25e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.898s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.966757] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.061843] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507832, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.239310] env[69475]: DEBUG oslo_vmware.api [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Task: {'id': task-3507834, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.229847} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.242834] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.243116] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.243961] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.243961] env[69475]: INFO nova.compute.manager [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Took 1.14 seconds to destroy the instance on the hypervisor. [ 704.243961] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.244304] env[69475]: DEBUG nova.compute.manager [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 704.244415] env[69475]: DEBUG nova.network.neutron [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.274992] env[69475]: DEBUG nova.compute.utils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 704.320810] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507835, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.396028] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507831, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.419623] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a157eb22-552c-426a-96ec-114ac6023908 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.428578] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e57e8cd-b751-4810-9897-852c7f3b5be1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.460536] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973cce03-25ba-4b55-96c2-034d8a5762c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.469442] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8593755b-6fcf-4527-b909-1cc8e520c902 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.484603] env[69475]: DEBUG nova.compute.provider_tree [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 704.553903] env[69475]: DEBUG oslo_vmware.api [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507832, 'name': PowerOnVM_Task, 'duration_secs': 0.702822} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.554233] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 704.554433] env[69475]: INFO nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Took 8.51 seconds to spawn the instance on the hypervisor. [ 704.554607] env[69475]: DEBUG nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 704.555401] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabe32ea-8734-4dc3-b056-21416d8f47ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.778703] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.812956] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507835, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644394} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.814348] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 704.814348] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 704.814348] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfbb34e2-7f4e-4c30-9a58-4e3bd4309d5c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.821254] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 704.821254] env[69475]: value = "task-3507836" [ 704.821254] env[69475]: _type = "Task" [ 704.821254] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.831087] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507836, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.838151] env[69475]: DEBUG nova.compute.manager [req-f79f8256-ac77-415c-8ff7-280c24734355 req-76ed54df-8715-401d-8b80-46b52d0db855 service nova] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Received event network-vif-deleted-267ce176-9932-4001-a96f-4e89c511fca3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 704.895558] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507831, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.988017] env[69475]: DEBUG nova.scheduler.client.report [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 705.078881] env[69475]: INFO nova.compute.manager [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Took 38.20 seconds to build instance. [ 705.246087] env[69475]: DEBUG nova.network.neutron [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.337148] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507836, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085172} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.337148] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 705.338291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8c195d-43f4-464a-8956-575bba76c970 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.376235] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 705.378363] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a4e3277-78bd-41d5-aaa5-442ea5186fbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.410943] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507831, 'name': CloneVM_Task, 'duration_secs': 1.955236} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.413748] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Created linked-clone VM from snapshot [ 705.414647] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 705.414647] env[69475]: value = "task-3507837" [ 705.414647] env[69475]: _type = "Task" [ 705.414647] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.415533] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96d4799e-6fc5-4401-9a85-fa9c906a7614 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.428155] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Uploading image f52d9638-ce86-4665-bd57-e2ffd4d5d09f {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 705.433263] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507837, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.459160] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 705.459160] env[69475]: value = "vm-700928" [ 705.459160] env[69475]: _type = "VirtualMachine" [ 705.459160] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 705.459459] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d2d6bd8b-c929-4c5f-8a54-057a446d556a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.468933] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lease: (returnval){ [ 705.468933] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30a1f-ca67-ca05-5baf-54884fbeb493" [ 705.468933] env[69475]: _type = "HttpNfcLease" [ 705.468933] env[69475]: } obtained for exporting VM: (result){ [ 705.468933] env[69475]: value = "vm-700928" [ 705.468933] env[69475]: _type = "VirtualMachine" [ 705.468933] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 705.469471] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the lease: (returnval){ [ 705.469471] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30a1f-ca67-ca05-5baf-54884fbeb493" [ 705.469471] env[69475]: _type = "HttpNfcLease" [ 705.469471] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 705.479380] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 705.479380] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30a1f-ca67-ca05-5baf-54884fbeb493" [ 705.479380] env[69475]: _type = "HttpNfcLease" [ 705.479380] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 705.493081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.117s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.495680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.280s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.497802] env[69475]: INFO nova.compute.claims [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 705.525401] env[69475]: INFO nova.scheduler.client.report [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted allocations for instance 3eda17da-111c-412d-9af4-d3a40b7d8faa [ 705.582331] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6545c499-fa01-4865-ba51-a41ebb288bb7 tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.472s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 705.749289] env[69475]: INFO nova.compute.manager [-] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Took 1.50 seconds to deallocate network for instance. 
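The Rename_Task, PowerOnVM_Task, CopyVirtualDisk_Task and CloneVM_Task entries above all follow the same oslo.vmware calling pattern: a SOAP method is invoked through the session and the returned task reference is polled until it completes, which is what produces the "Task: {...} progress is N%" lines. A minimal illustrative sketch of that pattern follows; the vCenter host, credentials and moref value are placeholders, not values recovered from this log.

# Illustrative sketch only; host, credentials and the moref value are placeholders.
from oslo_vmware import api as vmware_api
from oslo_vmware import vim_util

session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'admin', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# invoke_api() issues the SOAP call (e.g. PowerOnVM_Task) and returns a task moref;
# wait_for_task() then polls TaskInfo until it reaches 'success' (or raises on 'error'),
# emitting the "Waiting for the task" / "progress is N%" debug lines seen above.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
session.wait_for_task(task_ref)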
[ 705.899082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 705.899434] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 705.899785] env[69475]: INFO nova.compute.manager [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Attaching volume 9dabd7ab-c858-4db7-b661-6e72ec39b32a to /dev/sdb [ 705.931982] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507837, 'name': ReconfigVM_Task, 'duration_secs': 0.346164} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 705.932319] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Reconfigured VM instance instance-00000021 to attach disk [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 705.935204] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c708f2b5-138a-41a5-b778-f4ad5810bf98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.943692] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 705.943692] env[69475]: value = "task-3507839" [ 705.943692] env[69475]: _type = "Task" [ 705.943692] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.951675] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e2c3c6-8539-4765-9e5f-e70e46547a66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.959257] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507839, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.964894] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0a3d74-cda8-4a64-9e39-cd1e45736f50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.977992] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 705.977992] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30a1f-ca67-ca05-5baf-54884fbeb493" [ 705.977992] env[69475]: _type = "HttpNfcLease" [ 705.977992] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 705.978590] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 705.978590] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30a1f-ca67-ca05-5baf-54884fbeb493" [ 705.978590] env[69475]: _type = "HttpNfcLease" [ 705.978590] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 705.979166] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23767d2-f12c-42b2-a8a0-4a2fa903ae6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.986533] env[69475]: DEBUG nova.virt.block_device [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updating existing volume attachment record: 2d6756ee-9115-4b2d-bdc9-b854e2580671 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 705.997718] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 705.997718] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk for reading. 
{{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 706.065965] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e4cebd7b-0c3e-4364-89a9-6ab070a03820 tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3eda17da-111c-412d-9af4-d3a40b7d8faa" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.899s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.087106] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 706.103892] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-80321c2c-7aff-4a59-b236-bcdf8d7eb689 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.259527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.359551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.359551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.359551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.359551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
706.360040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.360658] env[69475]: INFO nova.compute.manager [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Terminating instance [ 706.460622] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507839, 'name': Rename_Task, 'duration_secs': 0.184634} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 706.461081] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 706.461392] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02c36ee9-6a05-4c0c-bcd1-24699f5c06f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.470558] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 706.470558] env[69475]: value = "task-3507843" [ 706.470558] env[69475]: _type = "Task" [ 706.470558] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.479918] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507843, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.639031] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.865904] env[69475]: DEBUG nova.compute.manager [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 706.867034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 706.867552] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c461db26-0805-481f-b166-df67491025f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.880617] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 706.880739] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb0b1b08-970b-4f4f-a747-342f2ae43324 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.890557] env[69475]: DEBUG oslo_vmware.api [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 706.890557] env[69475]: value = "task-3507844" [ 706.890557] env[69475]: _type = "Task" [ 706.890557] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 706.902262] env[69475]: DEBUG oslo_vmware.api [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507844, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.987397] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507843, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.188245] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d87dd77-2570-46c8-b253-4eb7c5129013 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.199720] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49a2e6c-4915-481c-b7e1-935b7124e26c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.240855] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf57a5ff-7b57-4a98-8251-159e9d2df0d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.247045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3fba85c9-7798-4a66-b335-21f80962e0bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.247426] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.254713] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ec964d-752a-489d-b975-cd80bf5c9d01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.275180] env[69475]: DEBUG nova.compute.provider_tree [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.279924] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.280352] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.402289] env[69475]: DEBUG oslo_vmware.api [None 
req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507844, 'name': PowerOffVM_Task, 'duration_secs': 0.265331} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.402586] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 707.402763] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 707.403169] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eec37258-8b9e-485e-a6f1-4e6904d32e34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.487644] env[69475]: DEBUG oslo_vmware.api [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507843, 'name': PowerOnVM_Task, 'duration_secs': 0.571287} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.489255] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 707.489566] env[69475]: INFO nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Took 8.72 seconds to spawn the instance on the hypervisor. 
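The recurring 'Acquiring lock "compute_resources" ... acquired ... waited N.NNNs ... "released" ... held N.NNNs' entries are emitted by oslo.concurrency's synchronized wrapper around the critical sections named in them (the inner() frames at lockutils.py:405/410/424 above). A minimal illustrative sketch of that locking pattern follows; the function body is a placeholder, not Nova's actual resource-tracker code.

from oslo_concurrency import lockutils

# Placeholder critical section; only the locking pattern mirrors the log above.
@lockutils.synchronized('compute_resources')
def update_usage():
    # While this runs, other callers of the same named lock block and later log
    # how long they "waited" for "compute_resources" and how long it was "held".
    pass

update_usage()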
[ 707.489771] env[69475]: DEBUG nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 707.490360] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 707.490564] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 707.490769] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Deleting the datastore file [datastore1] 6f530b86-2ed1-41db-929c-8a5dd61d931a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 707.491592] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a76aa1-ba89-469a-aedd-68c863de26cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.494735] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50d7a765-9339-400d-b647-56d80c907788 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.504415] env[69475]: DEBUG oslo_vmware.api [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for the task: (returnval){ [ 707.504415] env[69475]: value = "task-3507846" [ 707.504415] env[69475]: _type = "Task" [ 707.504415] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.514173] env[69475]: DEBUG oslo_vmware.api [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507846, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.781605] env[69475]: DEBUG nova.scheduler.client.report [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 708.015743] env[69475]: INFO nova.compute.manager [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Took 37.98 seconds to build instance. [ 708.021728] env[69475]: DEBUG oslo_vmware.api [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Task: {'id': task-3507846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.287059} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.021911] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.022133] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 708.022330] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 708.022509] env[69475]: INFO nova.compute.manager [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Took 1.16 seconds to destroy the instance on the hypervisor. [ 708.022751] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 708.022948] env[69475]: DEBUG nova.compute.manager [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 708.023065] env[69475]: DEBUG nova.network.neutron [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 708.287995] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.792s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.288708] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 708.291925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.511s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 708.293419] env[69475]: INFO nova.compute.claims [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.518174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7e33e979-5d84-4adf-87d1-c7cff08def49 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.447s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 708.753684] env[69475]: DEBUG nova.network.neutron [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.798413] env[69475]: DEBUG nova.compute.utils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 708.803526] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 708.803526] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 708.846356] env[69475]: DEBUG nova.policy [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5377694faa9c46d98c795f650c385e42', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd041345f126f4ad69469a2771e411ce9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 709.021156] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 709.117407] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 709.118135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1317cf20-50e9-4794-bdb6-cfae3cdb39f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.125846] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 709.126045] env[69475]: ERROR oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk due to incomplete transfer. 
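Editor's note: the oslo_vmware.rw_handles entries above show the image-upload path checking the HTTP NFC lease state ("ready") and then aborting the lease because the VMDK transfer never completed. The sketch below only mirrors that release-or-abort decision; the callables (`get_state`, `complete`, `abort`) are stand-ins, not the real oslo.vmware API.

```python
# Minimal sketch of the release-or-abort decision described by the
# rw_handles log lines above. Control flow only; names are illustrative.

def finish_nfc_lease(get_state, complete, abort, transferred, expected):
    """Complete the HTTP NFC lease only when the full VMDK was written."""
    state = get_state()                      # e.g. "ready" or "error"
    if state == "ready" and transferred >= expected:
        complete()                           # transfer done, keep the upload
        return "completed"
    abort()                                  # partial/failed transfer: discard it
    return "aborted"


if __name__ == "__main__":
    # The log shows a lease in state "ready" but an incomplete transfer,
    # so the handle aborts the lease before closing the read handle.
    result = finish_nfc_lease(
        get_state=lambda: "ready",
        complete=lambda: print("HttpNfcLeaseComplete"),
        abort=lambda: print("HttpNfcLeaseAbort"),
        transferred=10_000_000,
        expected=21_318_656,                 # image size reported in the metadata above
    )
    print(result)                            # -> aborted
```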
[ 709.126285] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-69a9b2ac-f175-4816-88b2-c11066d35eed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.137595] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525aa988-bb0d-113f-5813-3667ddfbc7e8/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 709.137834] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Uploaded image 63f82912-4988-46a0-a481-1ebf5f9703db to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 709.140075] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 709.140218] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5437378d-d9e5-4aa5-ba15-4ee874a0cf6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.150459] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 709.150459] env[69475]: value = "task-3507848" [ 709.150459] env[69475]: _type = "Task" [ 709.150459] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.159210] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507848, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.234236] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Successfully created port: dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 709.257233] env[69475]: INFO nova.compute.manager [-] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Took 1.23 seconds to deallocate network for instance. [ 709.301961] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 709.543858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.662220] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507848, 'name': Destroy_Task, 'duration_secs': 0.339369} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.663351] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Destroyed the VM [ 709.663669] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 709.663951] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6587e91e-a708-40c3-96b8-ec5ed517c4f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.677018] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 709.677018] env[69475]: value = "task-3507849" [ 709.677018] env[69475]: _type = "Task" [ 709.677018] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.687220] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507849, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.704128] env[69475]: DEBUG nova.compute.manager [req-dbd75cc9-7fe2-4fea-becd-45679ebfb981 req-3bf6545b-bb47-4c07-b8f8-f7b5a9ecce08 service nova] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Received event network-vif-deleted-e1445b37-7f07-4058-88cb-07a6189aa684 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 709.765039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.856486] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3875973c-7927-4982-93fd-8e54d6618bc3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.865999] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6143747c-b9b2-4bf8-95aa-8b5e17d5f581 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.901718] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f4f8582-d46c-4472-baf6-acae8056ab7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.911574] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86a3b50-e043-4a39-989f-de10caa503c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.927655] env[69475]: DEBUG nova.compute.provider_tree [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.176656] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.176656] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.187476] env[69475]: DEBUG oslo_vmware.api [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507849, 'name': RemoveSnapshot_Task, 
'duration_secs': 0.378747} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.187764] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 710.187997] env[69475]: INFO nova.compute.manager [None req-a7dfb48a-4a14-41fd-8e00-241921b4de37 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Took 14.74 seconds to snapshot the instance on the hypervisor. [ 710.315448] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 710.358768] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.359098] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 710.359325] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 710.359547] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 710.359728] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:406}} [ 710.359910] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 710.360157] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 710.360323] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 710.360501] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 710.360669] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 710.360894] env[69475]: DEBUG nova.virt.hardware [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 710.361792] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec1b07e-e80d-49d0-bfd9-018dc2c6b8e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.371724] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9f71abd-775a-49fa-a496-9820969cfd0e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.431049] env[69475]: DEBUG nova.scheduler.client.report [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 710.937050] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.937526] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 710.940704] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.058s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.942180] env[69475]: INFO nova.compute.claims [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.972521] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.972784] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.972990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.973192] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.973361] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 
tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.975718] env[69475]: INFO nova.compute.manager [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Terminating instance [ 711.062191] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Volume attach. Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 711.062444] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700930', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'name': 'volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '93607154-f135-4925-9c3a-a97051535b00', 'attached_at': '', 'detached_at': '', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'serial': '9dabd7ab-c858-4db7-b661-6e72ec39b32a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 711.063416] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e655cce3-90b3-4be4-9dfd-772b809cec59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.084114] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450ee03b-08c7-4e32-8017-a870b46e11ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.112026] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfiguring VM instance instance-0000000f to attach disk [datastore2] volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a/volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 711.112854] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9c0287c-ba1a-423e-ab16-3c774ffe1f8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.134579] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 711.134579] env[69475]: value = "task-3507850" [ 711.134579] env[69475]: _type = "Task" [ 711.134579] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.144466] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507850, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.158145] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Successfully updated port: dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 711.447604] env[69475]: DEBUG nova.compute.utils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 711.451309] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 711.451493] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 711.483564] env[69475]: DEBUG nova.compute.manager [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 711.483826] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 711.485460] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3425a56c-7a95-4193-b0ac-6f8805636c5c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.495240] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 711.495519] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-afb5ea6f-e2a5-42cb-a193-ce53064ffde2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.535578] env[69475]: DEBUG nova.policy [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32d8efff6f9e4846b49febaf379f07fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1784f9c01de49c494bc44e0272c02cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 711.564247] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 711.564247] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 711.564420] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] 9e2d4d61-71ed-447a-b28e-c29c5bd8d763 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 711.564802] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4085f35-415c-449d-a399-71362e539178 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.576043] env[69475]: DEBUG oslo_vmware.api [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the 
task: (returnval){ [ 711.576043] env[69475]: value = "task-3507852" [ 711.576043] env[69475]: _type = "Task" [ 711.576043] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.583013] env[69475]: DEBUG oslo_vmware.api [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507852, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.649776] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507850, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.664206] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.664206] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 711.664206] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.736211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.736444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.854267] env[69475]: INFO nova.compute.manager [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Rebuilding instance [ 711.899441] env[69475]: DEBUG nova.compute.manager [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 
ed12921f-9be8-474d-958e-79dd16b8116e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.900330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-753d325f-82a0-48d4-9af2-aeb8273d3d36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.932746] env[69475]: DEBUG nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Received event network-vif-deleted-f4c64f28-ecc9-429e-b7b1-363190aba0f8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 711.932969] env[69475]: DEBUG nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-vif-plugged-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 711.933446] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Acquiring lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 711.933663] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.933824] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.933991] env[69475]: DEBUG nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] No waiting events found dispatching network-vif-plugged-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 711.934196] env[69475]: WARNING nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received unexpected event network-vif-plugged-dd8084ea-8138-439f-a367-0e57562094f5 for instance with vm_state building and task_state spawning. 
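Editor's note: the req-c2616b85 entries above show neutron external events (network-vif-plugged, network-changed) arriving while instance 00ba5cd8 is still building: the manager takes the per-instance "-events" lock, looks for a registered waiter, and logs a WARNING when nothing is waiting. The snippet below is a hedged, pure-threading sketch of that pop-or-warn pattern; class and method names are illustrative, not Nova's implementation.

```python
# Sketch of the pop-or-warn event dispatch visible above: a per-instance event
# table protected by a lock; events with no registered waiter are only logged.

import logging
import threading
from collections import defaultdict

LOG = logging.getLogger(__name__)


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()          # plays the "<uuid>-events" lock role
        self._waiters = defaultdict(dict)      # instance uuid -> {event name: Event}

    def prepare(self, instance_uuid, event_name):
        """A build thread registers interest in an event before it arrives."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Deliver an external event; warn if nobody was waiting for it."""
        with self._lock:
            waiter = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if waiter is None:
            LOG.warning("Received unexpected event %s for instance %s",
                        event_name, instance_uuid)
            return
        waiter.set()


if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO)
    events = InstanceEvents()
    # No waiter registered, so this mirrors the WARNING in the log above.
    events.pop_event("00ba5cd8-3516-4059-bcda-c2d01e165e07",
                     "network-vif-plugged-dd8084ea-8138-439f-a367-0e57562094f5")
```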
[ 711.934356] env[69475]: DEBUG nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-changed-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 711.934526] env[69475]: DEBUG nova.compute.manager [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing instance network info cache due to event network-changed-dd8084ea-8138-439f-a367-0e57562094f5. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 711.934677] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.952129] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 712.052250] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Successfully created port: 5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.085017] env[69475]: DEBUG oslo_vmware.api [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3507852, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.308533} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.085301] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 712.085484] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 712.085661] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 712.085832] env[69475]: INFO nova.compute.manager [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Took 0.60 seconds to destroy the instance on the hypervisor. 
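Editor's note: entries such as "Task: {'id': task-3507852, ...} progress is 0%" followed by "completed successfully" come from a poll loop over a vCenter task handle. The sketch below shows such a loop in outline; `get_task_info` and the state/progress dictionary are assumptions for illustration, not oslo.vmware's actual wait_for_task signature.

```python
# Sketch of the task polling the oslo_vmware.api entries above describe:
# poll task state at an interval, log progress, return or raise on a terminal state.

import time


class TaskFailed(Exception):
    pass


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Block until the vCenter-style task succeeds, fails, or times out."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()               # e.g. {'state': 'running', 'progress': 40}
        state = info["state"]
        if state == "success":
            return info.get("result")
        if state == "error":
            raise TaskFailed(info.get("error", "task failed"))
        print(f"Task {info.get('id')} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
    raise TaskFailed("timed out waiting for task")


if __name__ == "__main__":
    # Fake task that completes on the third poll, like DeleteDatastoreFile_Task above.
    states = iter([
        {"id": "task-0", "state": "running", "progress": 0},
        {"id": "task-0", "state": "running", "progress": 50},
        {"id": "task-0", "state": "success", "result": None},
    ])
    wait_for_task(lambda: next(states), poll_interval=0.01)
```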
[ 712.086105] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.086318] env[69475]: DEBUG nova.compute.manager [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 712.086410] env[69475]: DEBUG nova.network.neutron [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 712.150204] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507850, 'name': ReconfigVM_Task, 'duration_secs': 0.590375} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.151082] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfigured VM instance instance-0000000f to attach disk [datastore2] volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a/volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 712.155613] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a00bf228-0a02-4057-b3ad-b228adfdfef7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.174954] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 712.174954] env[69475]: value = "task-3507853" [ 712.174954] env[69475]: _type = "Task" [ 712.174954] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.185327] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507853, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.208572] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 712.528067] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d538b070-6f5c-4ebd-bfc3-4d4c067e4170 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.537513] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c3c1ab-8b0c-46be-bce1-f858599b211c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.577461] env[69475]: DEBUG nova.network.neutron [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.579234] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c00492-9d6f-40fb-8fbd-ed1cc53e57c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.588059] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466052b6-d9e0-43e5-87a1-a045d877ca9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.604852] env[69475]: DEBUG nova.compute.provider_tree [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.686220] env[69475]: DEBUG oslo_vmware.api [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507853, 'name': ReconfigVM_Task, 'duration_secs': 0.198658} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.686549] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700930', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'name': 'volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '93607154-f135-4925-9c3a-a97051535b00', 'attached_at': '', 'detached_at': '', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'serial': '9dabd7ab-c858-4db7-b661-6e72ec39b32a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 712.914536] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 712.914628] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d366bbd7-bd5c-4fef-b4bf-60028cf6b6e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.922556] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 712.922556] env[69475]: value = "task-3507854" [ 712.922556] env[69475]: _type = "Task" [ 712.922556] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.931767] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507854, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.968765] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 712.998183] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 712.998479] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 712.998639] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 712.998846] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 712.999041] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 712.999220] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 712.999449] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 712.999624] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 712.999810] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 
tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 712.999986] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 713.000181] env[69475]: DEBUG nova.virt.hardware [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 713.001182] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91782b7e-b958-419a-b4cc-4d3de8ff91bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.010667] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cc778e2-16d7-43af-921e-a2928717563d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.083505] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.083901] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Instance network_info: |[{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 713.084248] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Acquired lock 
"refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.084445] env[69475]: DEBUG nova.network.neutron [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing network info cache for port dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 713.086066] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:be:9d:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1520c99-af74-4d61-a8ae-56aef56ef4f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd8084ea-8138-439f-a367-0e57562094f5', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 713.093888] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Creating folder: Project (d041345f126f4ad69469a2771e411ce9). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.097690] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5abbcad0-d72f-475e-9398-740177960c0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.107610] env[69475]: DEBUG nova.scheduler.client.report [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 713.114647] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Created folder: Project (d041345f126f4ad69469a2771e411ce9) in parent group-v700823. [ 713.114817] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Creating folder: Instances. Parent ref: group-v700931. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 713.114999] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc5ad114-2394-461b-8fc6-5aee83ca32ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.119435] env[69475]: DEBUG nova.network.neutron [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.128525] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Created folder: Instances in parent group-v700931. [ 713.128658] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 713.128870] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 713.129439] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2c2e94bf-c8a1-4cc9-bb18-fe753489fc4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.155027] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 713.155027] env[69475]: value = "task-3507857" [ 713.155027] env[69475]: _type = "Task" [ 713.155027] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.164192] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507857, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.359613] env[69475]: DEBUG nova.network.neutron [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updated VIF entry in instance network info cache for port dd8084ea-8138-439f-a367-0e57562094f5. 
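Annotation: just above, the neutron helper refreshes the cached network_info for port dd8084ea-8138-439f-a367-0e57562094f5 only, rather than rebuilding the whole cache. A simplified sketch of that "update just the matching VIF entry" idea, assuming network_info is the list of VIF dicts shown in the log:

def refresh_port_entry(network_info, port_id, fresh_vif):
    """Replace the cached VIF whose id matches port_id; append it if unseen."""
    for index, vif in enumerate(network_info):
        if vif.get('id') == port_id:
            network_info[index] = fresh_vif
            return network_info
    network_info.append(fresh_vif)
    return network_info

cache = [{'id': 'dd8084ea-8138-439f-a367-0e57562094f5', 'active': False}]
updated = refresh_port_entry(
    cache,
    'dd8084ea-8138-439f-a367-0e57562094f5',
    {'id': 'dd8084ea-8138-439f-a367-0e57562094f5', 'active': True},
)
print(updated[0]['active'])  # True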
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.360016] env[69475]: DEBUG nova.network.neutron [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.436456] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507854, 'name': PowerOffVM_Task, 'duration_secs': 0.221899} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.436661] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 713.437374] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.438165] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de85745-74f7-4bd6-9903-d10dbe09ac50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.449385] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 713.449759] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2abe0827-6d60-4a8e-8a40-da2421417716 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.516826] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 713.517151] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 713.517378] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 713.517745] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c5f943f-dc7f-4ebd-a7f1-ea03470ed3cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.526388] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 713.526388] env[69475]: value = "task-3507859" [ 713.526388] env[69475]: _type = "Task" [ 713.526388] env[69475]: } to complete. 
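Annotation: the pattern repeated throughout this log, invoke a vSphere task and then poll it until it finishes, is what the wait_for_task / _poll_task lines record. A minimal sketch of that polling loop against a hypothetical task object (not the actual oslo.vmware implementation, which drives the poll from a looping call):

import time

class TaskFailed(Exception):
    pass

def wait_for_task(task, poll_interval=0.5, timeout=300):
    """Poll a task-like object until it reports success, error, or timeout.

    `task` is hypothetical here: anything exposing .state ('running',
    'success', 'error'), .progress and .error_message.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if task.state == 'success':
            return task
        if task.state == 'error':
            raise TaskFailed(task.error_message)
        # The "progress is 0%" / "97%" lines in the log come from a poll like this.
        print(f"task progress is {task.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")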
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.535235] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507859, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.614407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.614967] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 713.617681] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.842s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.617976] env[69475]: DEBUG nova.objects.instance [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 713.621844] env[69475]: INFO nova.compute.manager [-] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Took 1.54 seconds to deallocate network for instance. [ 713.666356] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507857, 'name': CreateVM_Task, 'duration_secs': 0.335172} completed successfully. 
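Annotation: the lockutils lines in this segment report how long each caller waited for the "compute_resources" lock and how long it was held (held 2.674s in one case, waited 28.842s in another). A rough stand-in showing how such wait/hold timing can be logged around a lock, assuming a plain threading.Lock rather than oslo.concurrency's fair or external locks:

import threading
import time
from contextlib import contextmanager

_lock = threading.Lock()

@contextmanager
def timed_lock(name, lock=_lock):
    """Acquire `lock`, logging how long we waited and how long we held it."""
    wait_start = time.monotonic()
    with lock:
        waited = time.monotonic() - wait_start
        print(f'Lock "{name}" acquired :: waited {waited:.3f}s')
        hold_start = time.monotonic()
        try:
            yield
        finally:
            held = time.monotonic() - hold_start
            print(f'Lock "{name}" released :: held {held:.3f}s')

with timed_lock("compute_resources"):
    time.sleep(0.01)  # stand-in for the resource tracker's claim work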
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.666529] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 713.667213] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.667384] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.667887] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 713.669260] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d911f3b6-d836-4de1-8ac9-0bdc458de06a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.675468] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 713.675468] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527c3117-89a8-a43d-de1c-0836887c4bbe" [ 713.675468] env[69475]: _type = "Task" [ 713.675468] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.684687] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527c3117-89a8-a43d-de1c-0836887c4bbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.728322] env[69475]: DEBUG nova.objects.instance [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lazy-loading 'flavor' on Instance uuid 93607154-f135-4925-9c3a-a97051535b00 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 713.766986] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 713.767926] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a18159-552a-4297-b842-0f6b41c5d046 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.775091] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 713.775262] env[69475]: ERROR oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk due to incomplete transfer. [ 713.775495] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0fa8b666-86e2-40eb-983b-5e8bff0e01cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.783748] env[69475]: DEBUG oslo_vmware.rw_handles [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526a64b8-b260-5a96-5c3f-311e56759e10/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 713.783941] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Uploaded image f52d9638-ce86-4665-bd57-e2ffd4d5d09f to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 713.785534] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 713.785785] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-abc954c8-e143-4035-82c0-ae50b99bd26d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.793726] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 713.793726] env[69475]: value = "task-3507860" [ 713.793726] env[69475]: _type = "Task" [ 713.793726] env[69475]: } to complete. 
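Annotation: the rw_handles entries above check the NFC lease state for the exported disk and then abort the lease because the transfer was not read to completion, before closing the read handle. A simplified decision sketch, assuming a lease-like object with complete() and abort() methods (the real HttpNfcLease operations go through the vSphere API):

class LeaseAborted(Exception):
    pass

def finish_lease(lease, bytes_read, expected_bytes):
    """Complete the lease only if the whole disk was transferred; abort otherwise.

    `lease` is hypothetical: it exposes .state, .complete() and .abort().
    """
    if lease.state != 'ready':
        raise RuntimeError(f"lease in unexpected state: {lease.state}")
    if bytes_read < expected_bytes:
        # Matches the "Aborting lease ... due to incomplete transfer" error above.
        lease.abort()
        raise LeaseAborted(
            f"only {bytes_read} of {expected_bytes} bytes transferred")
    lease.complete()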
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.803378] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507860, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.863018] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2616b85-aeae-40ec-bb37-abccf8366345 req-055edc88-d8a4-49d9-a5ce-ed169544e755 service nova] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.036770] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507859, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239063} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.037065] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.037273] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.037449] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.122953] env[69475]: DEBUG nova.compute.utils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 714.128020] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Allocating IP information in the background. 
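Annotation: "Using /dev/sd instead of None" above comes from Nova's device-name helper falling back to the virtual disk prefix when the caller did not request a specific device. A toy version of picking the next free name under that prefix (not the real get_next_device_name, which also considers existing block device mappings and per-hypervisor prefixes):

import string

def next_device_name(used, prefix="/dev/sd"):
    """Return the first /dev/sdX name not present in `used`."""
    for letter in string.ascii_lowercase:
        candidate = prefix + letter
        if candidate not in used:
            return candidate
    raise ValueError("no free device names left under " + prefix)

print(next_device_name({"/dev/sda", "/dev/sdb"}))  # /dev/sdc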
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 714.128218] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 714.132135] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 714.188136] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527c3117-89a8-a43d-de1c-0836887c4bbe, 'name': SearchDatastore_Task, 'duration_secs': 0.03284} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.188446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.188690] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 714.188962] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.189132] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.189318] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 714.189593] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c08e0855-ba0c-4e50-8c8c-5b87dcf62ef3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.199769] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 714.199849] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 714.201985] env[69475]: DEBUG nova.policy [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52e0b5ed347744ec8a9a1c432c741814', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6fe52710b9d1461ea46698c9cf7bafb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 714.203802] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f527721-2dc7-4f9e-ac26-1913c4ebcbc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.211087] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 714.211087] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234bf8c-4f26-e653-38b5-cb6132d3e1a1" [ 714.211087] env[69475]: _type = "Task" [ 714.211087] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.221611] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234bf8c-4f26-e653-38b5-cb6132d3e1a1, 'name': SearchDatastore_Task} progress is 0%. 
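Annotation: the nova.policy entry above shows the network:attach_external_network check failing for a caller whose roles are only ['reader', 'member']. As a rough illustration of that kind of role-based gate, assuming a rule that requires the admin role (the real check is evaluated by oslo.policy from the registered policy defaults):

def is_authorized(credentials, required_roles):
    """Allow the action only if the caller holds at least one required role."""
    caller_roles = set(credentials.get("roles", []))
    return bool(caller_roles & set(required_roles))

creds = {"roles": ["reader", "member"],
         "project_id": "6fe52710b9d1461ea46698c9cf7bafb2"}
print(is_authorized(creds, required_roles={"admin"}))  # False -> policy check fails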
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.234451] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f455379b-d6d7-4674-8c28-a23b535658c4 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.335s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.304968] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507860, 'name': Destroy_Task, 'duration_secs': 0.307171} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.305256] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Destroyed the VM [ 714.305486] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 714.305736] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-a7295b5e-f14e-493c-a947-b1fbd85337bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.316831] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 714.316831] env[69475]: value = "task-3507861" [ 714.316831] env[69475]: _type = "Task" [ 714.316831] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.319077] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Successfully updated port: 5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 714.331670] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507861, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.336092] env[69475]: DEBUG nova.compute.manager [req-deb2fe35-a410-4457-bfe5-57298220a1c3 req-a82d2bb3-00ad-4983-886c-f16626179456 service nova] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Received event network-vif-deleted-241f2dac-56bc-473f-8ee9-6df190c99664 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.629422] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 714.636020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0d0f3f5-099e-480c-bd9c-412fe6afbe84 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.636020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.736s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 714.636213] env[69475]: INFO nova.compute.claims [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 714.722618] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234bf8c-4f26-e653-38b5-cb6132d3e1a1, 'name': SearchDatastore_Task, 'duration_secs': 0.01107} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.725327] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d2869b6-e258-4a91-9bdf-82bd1619ebd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.732435] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 714.732435] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f4034-1992-b0b6-6635-f0aa38f1f6c2" [ 714.732435] env[69475]: _type = "Task" [ 714.732435] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.739881] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f4034-1992-b0b6-6635-f0aa38f1f6c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.741761] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Successfully created port: 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 714.824583] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.824830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.825090] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 714.833169] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507861, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.071636] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.071964] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 715.073283] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 715.073283] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 715.073283] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 715.073283] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 715.073564] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 715.073763] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 715.074111] env[69475]: 
DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 715.074328] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 715.074601] env[69475]: DEBUG nova.virt.hardware [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 715.077772] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b23f434-a437-4320-b3eb-db5ee212b38c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.086218] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-977416cc-6607-4551-8381-1903a3f387c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.105610] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:9d:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2cb85199-de39-4837-a34d-c8ae33659f9b', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 715.113980] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 715.114329] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 715.114615] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1f1ac67a-9221-4941-a030-2955e2e438b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.138835] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 715.138835] env[69475]: value = "task-3507862" [ 715.138835] env[69475]: _type = "Task" [ 715.138835] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.150956] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507862, 'name': CreateVM_Task} progress is 0%. 
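Annotation: the nova.virt.hardware block repeated above resolves a CPU topology for the 1-vCPU m1.nano flavor with no explicit limits or preferences; with everything unconstrained, the only possible topology is 1 socket x 1 core x 1 thread, which is exactly what the log reports. A reduced sketch of that enumeration for the unconstrained case (the real code additionally honours flavor/image limits and preference ordering):

from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield every sockets*cores*threads combination that equals `vcpus`."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield VirtCPUTopology(sockets, cores, threads)

# For 1 vCPU this yields the single topology reported in the log.
print(list(possible_topologies(1)))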
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.243344] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f4034-1992-b0b6-6635-f0aa38f1f6c2, 'name': SearchDatastore_Task, 'duration_secs': 0.010161} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.243741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.244061] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 00ba5cd8-3516-4059-bcda-c2d01e165e07/00ba5cd8-3516-4059-bcda-c2d01e165e07.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 715.244418] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ddf3e65e-0913-4772-8c22-12a8fc4123d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.254566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 715.255109] env[69475]: DEBUG oslo_concurrency.lockutils [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 715.257400] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 715.257400] env[69475]: value = "task-3507863" [ 715.257400] env[69475]: _type = "Task" [ 715.257400] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.269107] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507863, 'name': CopyVirtualDisk_Task} progress is 0%. 
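Annotation: the vm_util entry above copies the cached image VMDK from devstack-image-cache_base/<image-id>/ into the new instance's directory, which appears to be the cache-hit path of _fetch_image_if_missing: the SearchDatastore_Task just before it found the cached disk, so no download from Glance is needed. A schematic sketch of that decision under the assumption that plain file copies stand in for the datastore operations, with `download` a hypothetical caller-supplied fetch function:

from pathlib import Path
import shutil

def fetch_image_if_missing(cache_dir, image_id, instance_dir, download):
    """Copy the cached VMDK into the instance dir, downloading it first on a miss."""
    cached = Path(cache_dir) / image_id / f"{image_id}.vmdk"
    if not cached.exists():                      # cache miss: fetch once, reuse later
        cached.parent.mkdir(parents=True, exist_ok=True)
        download(image_id, cached)               # hypothetical Glance fetch
    target = Path(instance_dir) / "root.vmdk"
    target.parent.mkdir(parents=True, exist_ok=True)
    shutil.copyfile(cached, target)              # cache-hit path seen in the log
    return target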
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.341020] env[69475]: DEBUG oslo_vmware.api [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507861, 'name': RemoveSnapshot_Task, 'duration_secs': 0.75371} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.341020] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 715.341020] env[69475]: INFO nova.compute.manager [None req-884d2ee8-9838-404f-928a-d9deb29a55e1 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 14.59 seconds to snapshot the instance on the hypervisor. [ 715.394129] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 715.640685] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 715.656184] env[69475]: DEBUG nova.network.neutron [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Updating instance_info_cache with network_info: [{"id": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "address": "fa:16:3e:57:81:43", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c37f4d6-0e", "ovs_interfaceid": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.666855] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507862, 'name': CreateVM_Task, 'duration_secs': 0.522595} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.667371] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 715.668097] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.668296] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 715.668637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 715.669033] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7487b920-003f-4020-b4d8-8e9307010050 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.679888] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 715.679888] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fd12ab-b2b6-9b16-cc01-48eaa73acd02" [ 715.679888] env[69475]: _type = "Task" [ 715.679888] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.690045] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 715.690045] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 715.690045] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 715.690332] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 715.690332] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 715.690332] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 715.690332] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 715.690332] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 715.690475] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 715.690475] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 715.690475] env[69475]: DEBUG nova.virt.hardware [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 715.690475] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edbe3863-5631-4542-a283-5da7042bba8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.704667] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fd12ab-b2b6-9b16-cc01-48eaa73acd02, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.710764] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c17a313-807c-4bd3-a24a-a9cc703e3751 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.759489] env[69475]: INFO nova.compute.manager [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Detaching volume 9dabd7ab-c858-4db7-b661-6e72ec39b32a [ 715.773859] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507863, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.810235] env[69475]: INFO nova.virt.block_device [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Attempting to driver detach volume 9dabd7ab-c858-4db7-b661-6e72ec39b32a from mountpoint /dev/sdb [ 715.810486] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 715.810740] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700930', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'name': 'volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '93607154-f135-4925-9c3a-a97051535b00', 'attached_at': '', 'detached_at': '', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'serial': '9dabd7ab-c858-4db7-b661-6e72ec39b32a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 715.811629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a88d2d2-daf7-4ff7-8d50-207dcbd6334c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.839135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44cf91b6-fa9c-4dcf-96c6-bdfdddea4c47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.848641] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471b9bdc-4964-49a5-9a2b-155c38e4faf7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.876203] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f98314fb-a0a4-42de-a53f-ce34c34f8880 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.892527] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] The volume has not been displaced from its original location: [datastore2] volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a/volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a.vmdk. No consolidation needed. 
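Back at the nova.virt.hardware entries a little above (flavor m1.nano with 1 vCPU, limits 65536:65536:65536, chosen topology 1:1:1): the selection amounts to enumerating (sockets, cores, threads) triples whose product equals the vCPU count and that stay within the limits. The snippet below is a simplified illustration of that enumeration, not the code in hardware.py.

```python
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples that exactly account for `vcpus`."""
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            yield s, c, t


# For the 1-vCPU flavor this reproduces the single topology in the log:
print(list(possible_topologies(1)))   # [(1, 1, 1)]
```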
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 715.897799] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfiguring VM instance instance-0000000f to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 715.901024] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc205636-1db3-4020-a693-ef8eec16f0ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.920118] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 715.920118] env[69475]: value = "task-3507864" [ 715.920118] env[69475]: _type = "Task" [ 715.920118] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.930042] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507864, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.168248] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.168445] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Instance network_info: |[{"id": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "address": "fa:16:3e:57:81:43", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c37f4d6-0e", "ovs_interfaceid": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 716.168874] env[69475]: DEBUG 
nova.virt.vmwareapi.vmops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:81:43', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c37f4d6-0e34-4637-ac7c-73daa6f83e42', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 716.179303] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 716.182117] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 716.182549] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5359debb-4496-4a9a-87e7-e9121677afbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.211963] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fd12ab-b2b6-9b16-cc01-48eaa73acd02, 'name': SearchDatastore_Task, 'duration_secs': 0.052671} completed successfully. 
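The network_info / "Instance VIF info" blobs carried through these entries hold everything the driver needs to plug the port: MAC address, fixed IPs, MTU and the NSX logical-switch id. Here is a small, hypothetical helper that pulls the basics out of one such entry; the key names follow the JSON shown in the log and the helper itself is not part of Nova.

```python
def summarize_vif(vif):
    """Return (mac, fixed_ips, mtu) from one network_info entry."""
    ips = [ip['address']
           for subnet in vif['network']['subnets']
           for ip in subnet['ips']]
    return vif['address'], ips, vif['network']['meta'].get('mtu')


example = {
    "id": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42",
    "address": "fa:16:3e:57:81:43",
    "network": {
        "subnets": [{"ips": [{"address": "192.168.128.9"}]}],
        "meta": {"mtu": 8950},
    },
}
print(summarize_vif(example))   # ('fa:16:3e:57:81:43', ['192.168.128.9'], 8950)
```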
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.215443] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 716.215658] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 716.215885] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.216047] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.216235] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 716.216471] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 716.216471] env[69475]: value = "task-3507865" [ 716.216471] env[69475]: _type = "Task" [ 716.216471] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.216828] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ac0a3cf-2508-42e9-80ec-56f942c37ff9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.226582] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507865, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.228687] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 716.228907] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 716.229610] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11bc111c-bd69-435a-9441-ec0c69933939 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.235978] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 716.235978] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ecf7-43fa-31c9-15a5-225dc2e1bb49" [ 716.235978] env[69475]: _type = "Task" [ 716.235978] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.244608] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ecf7-43fa-31c9-15a5-225dc2e1bb49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.246726] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2137cae3-152e-4020-9ac3-c8f17f0c68f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.254458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90307615-7195-4abf-a9b4-a19231cd1d8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.297285] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a487959c-6723-484a-8b72-c35953030541 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.303641] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507863, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541262} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.304254] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 00ba5cd8-3516-4059-bcda-c2d01e165e07/00ba5cd8-3516-4059-bcda-c2d01e165e07.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 716.304468] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 716.304712] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9e393b7-265d-4542-b45d-a96a6e459929 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.309906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf8b473-f116-4c46-aaa3-9e843c94ad92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.315701] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 716.315701] env[69475]: value = "task-3507866" [ 716.315701] env[69475]: _type = "Task" [ 716.315701] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.336221] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507866, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.344669] env[69475]: DEBUG nova.compute.provider_tree [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.432175] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507864, 'name': ReconfigVM_Task, 'duration_secs': 0.272639} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.432467] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Reconfigured VM instance instance-0000000f to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 716.437230] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-723c5136-0185-4195-a4b1-f05ae900add1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.454216] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 716.454216] env[69475]: value = "task-3507867" [ 716.454216] env[69475]: _type = "Task" [ 716.454216] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.465185] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507867, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.474617] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Successfully updated port: 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 716.496246] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Received event network-vif-plugged-5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.497394] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Acquiring lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 716.497724] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.497958] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.498160] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] No waiting events found dispatching network-vif-plugged-5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 716.498355] env[69475]: WARNING nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Received unexpected event network-vif-plugged-5c37f4d6-0e34-4637-ac7c-73daa6f83e42 for instance with vm_state building and task_state spawning. [ 716.498519] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Received event network-changed-5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.498675] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Refreshing instance network info cache due to event network-changed-5c37f4d6-0e34-4637-ac7c-73daa6f83e42. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 716.498952] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Acquiring lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.499154] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Acquired lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.499410] env[69475]: DEBUG nova.network.neutron [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Refreshing network info cache for port 5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 716.729751] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507865, 'name': CreateVM_Task, 'duration_secs': 0.396192} completed successfully. 
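The pop_instance_event / "No waiting events found dispatching network-vif-plugged-..." lines show the compute manager matching externally delivered Neutron events against waiters registered per instance; when nothing is registered, the event is logged as unexpected, exactly as above. Below is a toy registry with the same pop semantics, illustrative only and not Nova's InstanceEvents class.

```python
import threading
from collections import defaultdict


class EventRegistry:
    """Per-instance map of event name -> waiter, popped under a lock."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance uuid -> {event: Event}

    def prepare(self, instance, event):
        """Register a waiter before the external event is expected."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance][event] = waiter
        return waiter

    def pop(self, instance, event):
        """Return and remove the waiter, or None if nothing was waiting
        (the 'Received unexpected event' case in the log)."""
        with self._lock:
            return self._waiters[instance].pop(event, None)
```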
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.730103] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 716.730693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.730932] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.731352] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 716.731630] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1345961e-d0d9-44ba-be92-a74cc69abeb0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.736973] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 716.736973] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e178ea-5007-8a80-51c0-efa162260ea6" [ 716.736973] env[69475]: _type = "Task" [ 716.736973] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.748551] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243ecf7-43fa-31c9-15a5-225dc2e1bb49, 'name': SearchDatastore_Task, 'duration_secs': 0.01264} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.752167] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e178ea-5007-8a80-51c0-efa162260ea6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.752383] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96be6e96-9b01-4ce6-939c-cfac772a5f1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.758677] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 716.758677] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520c65c8-0138-7933-6e9e-7031d8b80ae5" [ 716.758677] env[69475]: _type = "Task" [ 716.758677] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.767478] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520c65c8-0138-7933-6e9e-7031d8b80ae5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.826604] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.216296} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.826865] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 716.827638] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6fdb879-e966-49fe-9727-8c2f89b1989a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.849848] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 00ba5cd8-3516-4059-bcda-c2d01e165e07/00ba5cd8-3516-4059-bcda-c2d01e165e07.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 716.850853] env[69475]: DEBUG nova.scheduler.client.report [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 716.856479] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-482c9983-6ca4-4900-81f6-dc72215b3ffe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.874634] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.240s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 716.875151] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 716.877682] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.535s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 716.877836] env[69475]: DEBUG nova.objects.instance [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lazy-loading 'resources' on Instance uuid 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 716.885626] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 716.885626] env[69475]: value = "task-3507868" [ 716.885626] env[69475]: _type = "Task" [ 716.885626] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.895181] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507868, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.964862] env[69475]: DEBUG oslo_vmware.api [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3507867, 'name': ReconfigVM_Task, 'duration_secs': 0.161942} completed successfully. 
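For the inventory reported for provider dd221100-68c1-4a75-92b5-b24d81fee5da just above, schedulable capacity per resource class works out to (total - reserved) * allocation_ratio. Reading the logged numbers through that formula, as a quick check rather than placement's actual code:

```python
# Values copied from the inventory dict logged above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
```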
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.965268] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700930', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'name': 'volume-9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '93607154-f135-4925-9c3a-a97051535b00', 'attached_at': '', 'detached_at': '', 'volume_id': '9dabd7ab-c858-4db7-b661-6e72ec39b32a', 'serial': '9dabd7ab-c858-4db7-b661-6e72ec39b32a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 716.975527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 716.975527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 716.975527] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 717.259348] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e178ea-5007-8a80-51c0-efa162260ea6, 'name': SearchDatastore_Task, 'duration_secs': 0.01222} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.259348] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.259348] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 717.259348] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.259708] env[69475]: DEBUG nova.network.neutron [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Updated VIF entry in instance network info cache for port 5c37f4d6-0e34-4637-ac7c-73daa6f83e42. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 717.260238] env[69475]: DEBUG nova.network.neutron [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Updating instance_info_cache with network_info: [{"id": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "address": "fa:16:3e:57:81:43", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c37f4d6-0e", "ovs_interfaceid": "5c37f4d6-0e34-4637-ac7c-73daa6f83e42", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.273395] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520c65c8-0138-7933-6e9e-7031d8b80ae5, 'name': 
SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.274072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.274072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 717.274278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 717.274443] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 717.274678] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c93ebe9-24b1-4b41-9a73-1c7b94444fef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.277235] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8482966d-4ca7-4d72-bf0e-ed9063e5ed04 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.286190] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 717.286190] env[69475]: value = "task-3507869" [ 717.286190] env[69475]: _type = "Task" [ 717.286190] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.291237] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 717.291390] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 717.292560] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3da8cade-d2e1-466e-acfc-f832a3e7336b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.299032] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507869, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.302694] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 717.302694] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaec65-40c2-b425-e5eb-b81b3b67fd80" [ 717.302694] env[69475]: _type = "Task" [ 717.302694] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.312148] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaec65-40c2-b425-e5eb-b81b3b67fd80, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.384185] env[69475]: DEBUG nova.compute.utils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 717.385714] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 717.385882] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 717.398643] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507868, 'name': ReconfigVM_Task, 'duration_secs': 0.335358} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.399493] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 00ba5cd8-3516-4059-bcda-c2d01e165e07/00ba5cd8-3516-4059-bcda-c2d01e165e07.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 717.400117] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd894534-e1ea-41a2-8c2e-ac7c9fb5e660 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.407850] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 717.407850] env[69475]: value = "task-3507870" [ 717.407850] env[69475]: _type = "Task" [ 717.407850] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.417368] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507870, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.441907] env[69475]: DEBUG nova.policy [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fc77aa810b24582ba0069952b28d1b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f2c2f5187934f5da108a1c96a3a3125', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 717.557019] env[69475]: DEBUG nova.objects.instance [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lazy-loading 'flavor' on Instance uuid 93607154-f135-4925-9c3a-a97051535b00 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.557845] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 717.765516] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Releasing lock "refresh_cache-d1e5e08d-b41a-4655-997d-91fbd3581f00" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 717.765516] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-vif-plugged-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 717.765516] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Acquiring lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.765516] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.765516] env[69475]: DEBUG oslo_concurrency.lockutils [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.766011] env[69475]: DEBUG nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] No waiting events found dispatching network-vif-plugged-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 717.766011] env[69475]: WARNING nova.compute.manager [req-1708a614-4cfe-45e5-abc8-c452c51dbb23 req-4ad0c391-72c7-4543-a0e5-5e570a3a911f service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received unexpected event network-vif-plugged-5abe617e-d18a-416f-8c40-d0da33a563d2 for instance with vm_state building and task_state spawning. [ 717.914921] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507869, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.914921] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaec65-40c2-b425-e5eb-b81b3b67fd80, 'name': SearchDatastore_Task, 'duration_secs': 0.031278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.914921] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-545d9817-9d36-4659-aa46-752aaed802b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.914921] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 717.914921] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ed2fd-d559-1674-2703-d7ecedc54d2c" [ 717.914921] env[69475]: _type = "Task" [ 717.914921] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.916330] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ed2fd-d559-1674-2703-d7ecedc54d2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.916330] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 717.917025] env[69475]: DEBUG nova.network.neutron [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.920868] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507870, 'name': Rename_Task, 'duration_secs': 0.211565} 
completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.921410] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 717.921657] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e337618a-f5c9-4857-9a07-0086d3a55b2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.932188] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 717.932188] env[69475]: value = "task-3507871" [ 717.932188] env[69475]: _type = "Task" [ 717.932188] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.944850] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507871, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.117273] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6632433a-2dfd-432c-a598-573b3bc01b17 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.126177] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58efa044-cabc-4afd-b40c-fe02ec14112c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.158566] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09caa55f-4707-49e7-8478-c93730693840 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.167231] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b240aa1-8898-4505-9d13-1786fe1f553f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.174605] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully created port: c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.183977] env[69475]: DEBUG nova.compute.provider_tree [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.263225] env[69475]: DEBUG nova.compute.manager [None 
req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.264350] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa60584e-9d24-4b30-b214-7a93d2c172f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.297885] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507869, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.545164} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.298156] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 718.298360] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 718.298613] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-33a1f794-83fb-4340-a7dd-5412d2b7168f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.311075] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 718.311075] env[69475]: value = "task-3507872" [ 718.311075] env[69475]: _type = "Task" [ 718.311075] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.338536] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507872, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.357029] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ed2fd-d559-1674-2703-d7ecedc54d2c, 'name': SearchDatastore_Task, 'duration_secs': 0.009921} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.357029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.357029] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] d1e5e08d-b41a-4655-997d-91fbd3581f00/d1e5e08d-b41a-4655-997d-91fbd3581f00.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 718.357029] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb716960-53a6-48c5-9818-4993444007cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.364214] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 718.364214] env[69475]: value = "task-3507873" [ 718.364214] env[69475]: _type = "Task" [ 718.364214] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.372592] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507873, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.425026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 718.425026] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Instance network_info: |[{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 718.425363] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:cb:f9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ad36dd36-1d2c-4f37-a259-98ef2e440794', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5abe617e-d18a-416f-8c40-d0da33a563d2', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 718.432766] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Creating folder: Project (6fe52710b9d1461ea46698c9cf7bafb2). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.433570] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7ef53614-eea0-4659-9571-563eb774c2c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.447447] env[69475]: DEBUG oslo_vmware.api [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507871, 'name': PowerOnVM_Task, 'duration_secs': 0.507713} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.447875] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 718.448259] env[69475]: INFO nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Took 8.13 seconds to spawn the instance on the hypervisor. [ 718.449767] env[69475]: DEBUG nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 718.449767] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35603bab-f8da-48e1-9eac-d96bae078baf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.454193] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Created folder: Project (6fe52710b9d1461ea46698c9cf7bafb2) in parent group-v700823. [ 718.454412] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Creating folder: Instances. Parent ref: group-v700936. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 718.455058] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5c7fb69-2284-468b-a3ef-542d264a8dfa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.466557] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Created folder: Instances in parent group-v700936. 
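A note on the recurring "Invoking <Type>.<Method>_Task ... / Waiting for the task ... / progress is N% ... / completed successfully" groups above (Rename_Task, PowerOnVM_Task, CreateVM_Task, the SearchDatastore_Task calls): this is the standard oslo.vmware pattern in which the driver issues the SOAP call, gets back a Task managed object reference, and then blocks in the session's wait_for_task() while a fixed-interval looping call polls the task state. A minimal sketch of that pattern, assuming session is an existing oslo_vmware.api.VMwareAPISession and vm_ref is a VirtualMachine managed object reference (both placeholders, not objects taken from this run):

    def rename_vm(session, vm_ref, new_name):
        # invoke_api() issues the SOAP request (the "Invoking
        # VirtualMachine.Rename_Task" line) and immediately returns a Task
        # managed object reference.
        task = session.invoke_api(session.vim, 'Rename_Task', vm_ref,
                                  newName=new_name)
        # wait_for_task() polls the task on a fixed interval (the "progress is
        # 0%" lines) and returns its task info once the state is 'success',
        # raising if the task errors out or is cancelled.
        return session.wait_for_task(task)
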
[ 718.466557] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 718.466557] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 718.466682] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-74f67fd4-b2da-4ae4-8fe4-b27d4180a462 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.490135] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 718.490135] env[69475]: value = "task-3507876" [ 718.490135] env[69475]: _type = "Task" [ 718.490135] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.501709] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507876, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.568313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-af7a629f-546a-4ea2-a3e4-91a72d69deed tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.313s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.613338] env[69475]: DEBUG nova.compute.manager [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 718.613452] env[69475]: DEBUG nova.compute.manager [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing instance network info cache due to event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 718.613636] env[69475]: DEBUG oslo_concurrency.lockutils [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.613801] env[69475]: DEBUG oslo_concurrency.lockutils [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 718.613964] env[69475]: DEBUG nova.network.neutron [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 718.640516] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully created port: 2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.686949] env[69475]: DEBUG nova.scheduler.client.report [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 718.776854] env[69475]: INFO nova.compute.manager [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] instance snapshotting [ 718.779992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fcf6584-8b59-4868-9712-d892df665bdd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.809171] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81c7ad3c-d1b6-4d2b-87a6-142857872d66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.824496] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507872, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.119726} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 718.827935] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 718.829757] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef63ac18-5c59-4a41-8bbd-0729886456c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.858559] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 718.858918] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe945248-d551-4a19-8ec2-046916166dab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.888979] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507873, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.891775] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 718.891775] env[69475]: value = "task-3507877" [ 718.891775] env[69475]: _type = "Task" [ 718.891775] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 718.901042] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 718.903133] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507877, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 718.925738] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 718.926054] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 718.926292] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 718.926505] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 718.926663] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 718.926823] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 718.927047] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 718.927213] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 718.927415] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 
tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 718.927586] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 718.927760] env[69475]: DEBUG nova.virt.hardware [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 718.928785] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb09bf8-4ad3-4aab-8c59-72b81c977a0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.937153] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc5b91f-3251-4b53-8f30-c3d30a2370d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.975132] env[69475]: INFO nova.compute.manager [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Took 42.79 seconds to build instance. [ 719.002388] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507876, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.121309] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully created port: 117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 719.192352] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.315s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.194936] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.750s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.195428] env[69475]: DEBUG nova.objects.instance [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lazy-loading 'resources' on Instance uuid 8f65d893-d2e2-452f-8870-f72ec036f16a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.224238] env[69475]: INFO nova.scheduler.client.report [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Deleted allocations for instance 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de [ 719.330157] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 719.330737] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-196274ed-a6d6-40e1-8879-0f6ae2dba586 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.339188] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 719.339188] env[69475]: value = "task-3507878" [ 719.339188] env[69475]: _type = "Task" [ 719.339188] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.348465] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507878, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.386585] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.562886} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.386845] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] d1e5e08d-b41a-4655-997d-91fbd3581f00/d1e5e08d-b41a-4655-997d-91fbd3581f00.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 719.387095] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 719.387353] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fe4cbd43-11f9-4fe0-9560-0f99547f9847 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.400403] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 719.400403] env[69475]: value = "task-3507879" [ 719.400403] env[69475]: _type = "Task" [ 719.400403] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.412317] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507877, 'name': ReconfigVM_Task, 'duration_secs': 0.317116} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.413171] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Reconfigured VM instance instance-00000021 to attach disk [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e/ed12921f-9be8-474d-958e-79dd16b8116e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 719.413881] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-93755f85-da43-4b2e-9b3b-7a2c6df72fec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.418312] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507879, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.424826] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 719.424826] env[69475]: value = "task-3507880" [ 719.424826] env[69475]: _type = "Task" [ 719.424826] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.434537] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507880, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.478895] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13f66a31-84ad-4b21-b094-ba04ba3a1e8c tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.249s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.503193] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507876, 'name': CreateVM_Task, 'duration_secs': 0.662091} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.503423] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 719.504044] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 719.504224] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 719.504579] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 719.504828] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9732b95c-cdf2-446d-a05a-80ad41043671 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.510217] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 719.510217] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e18946-cdf9-a7d5-99db-c138dd653a68" [ 719.510217] env[69475]: _type = "Task" [ 719.510217] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.523281] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e18946-cdf9-a7d5-99db-c138dd653a68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.602241] env[69475]: DEBUG nova.network.neutron [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updated VIF entry in instance network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 719.602241] env[69475]: DEBUG nova.network.neutron [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.742349] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63dcf167-2917-412c-b116-e1715d118648 tempest-VolumesAssistedSnapshotsTest-551197811 tempest-VolumesAssistedSnapshotsTest-551197811-project-member] Lock "9cfd8425-c1aa-4dbc-afa4-3a5aa10428de" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.452s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.746725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.746725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.850291] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507878, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 719.914493] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082223} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.914807] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 719.915621] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcf3bc8-7fd9-44c9-b343-7b507cece1d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.949788] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] d1e5e08d-b41a-4655-997d-91fbd3581f00/d1e5e08d-b41a-4655-997d-91fbd3581f00.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 719.954308] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89978b98-a53c-4b91-998e-87657ae62ff1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.975850] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507880, 'name': Rename_Task, 'duration_secs': 0.167942} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 719.977378] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 719.977964] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 719.977964] env[69475]: value = "task-3507881" [ 719.977964] env[69475]: _type = "Task" [ 719.977964] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 719.977964] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7147abca-dd6b-4352-b9c9-934be872de06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.986231] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.997594] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 719.997594] env[69475]: value = "task-3507882" [ 719.997594] env[69475]: _type = "Task" [ 719.997594] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.007019] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507881, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.012876] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507882, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.027045] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e18946-cdf9-a7d5-99db-c138dd653a68, 'name': SearchDatastore_Task, 'duration_secs': 0.012755} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.027321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.028558] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 720.028558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 720.028558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 720.028558] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 720.028558] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-da990b0c-4b3e-4029-97d4-ec7468e997a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.042094] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 720.042094] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 720.042094] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3351ff9c-e83e-4cfd-90d2-c69b0ed531a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.049671] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 720.049671] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5759b-6e84-2114-42ce-b8b64f89fcc5" [ 720.049671] env[69475]: _type = "Task" [ 720.049671] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.065683] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5759b-6e84-2114-42ce-b8b64f89fcc5, 'name': SearchDatastore_Task} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.066661] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8a64e16-4dff-4fd9-aff7-e4a999ce328c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.076317] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 720.076317] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a5400d-3f14-b890-9203-e2a438990a23" [ 720.076317] env[69475]: _type = "Task" [ 720.076317] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.084505] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a5400d-3f14-b890-9203-e2a438990a23, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.108014] env[69475]: DEBUG oslo_concurrency.lockutils [req-87d66adb-2069-4085-b846-7a53b708a04b req-371c7cab-4f8e-4a59-a716-918df7bb9dbf service nova] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.313030] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5486f71d-3fac-41ea-a0e9-8507b0e7c0b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.335682] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77711e62-7a39-487e-ae0e-55ac92786845 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.373850] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34350aaa-ca5d-49ae-a593-b953b9f2aab3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.376620] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507878, 'name': CreateSnapshot_Task, 'duration_secs': 0.71326} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.376885] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 720.378097] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b17773da-3a92-4941-9bc5-622a407cf1fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.384517] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-253c040b-0736-4380-a674-f1a0b73391dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.407342] env[69475]: DEBUG nova.compute.provider_tree [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.490113] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507881, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.517462] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507882, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.528712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.588110] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a5400d-3f14-b890-9203-e2a438990a23, 'name': SearchDatastore_Task, 'duration_secs': 0.028932} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.588372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 720.588627] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b41845c6-46bd-4b3b-ab26-d7d2dad08f84/b41845c6-46bd-4b3b-ab26-d7d2dad08f84.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 720.588882] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3c6ed672-062e-4dba-8296-f098563dc615 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.596149] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 720.596149] env[69475]: value = "task-3507883" [ 720.596149] env[69475]: _type = "Task" [ 720.596149] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.604766] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507883, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.908175] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 720.908175] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-58214fab-41c9-4cad-b195-8d04fb39fe2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.911400] env[69475]: DEBUG nova.scheduler.client.report [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 720.923775] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 720.923775] env[69475]: value = "task-3507884" [ 720.923775] env[69475]: _type = "Task" [ 720.923775] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.937922] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.992792] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507881, 'name': ReconfigVM_Task, 'duration_secs': 0.54875} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.993493] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Reconfigured VM instance instance-00000022 to attach disk [datastore1] d1e5e08d-b41a-4655-997d-91fbd3581f00/d1e5e08d-b41a-4655-997d-91fbd3581f00.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 720.996049] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d48f08c6-8076-4d44-a8b1-15341a850d89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.007272] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 721.007272] env[69475]: value = "task-3507885" [ 721.007272] env[69475]: _type = "Task" [ 721.007272] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.019079] env[69475]: DEBUG oslo_vmware.api [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507882, 'name': PowerOnVM_Task, 'duration_secs': 0.557447} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.019079] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 721.019079] env[69475]: DEBUG nova.compute.manager [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 721.019079] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-059fccd1-21ce-4ab1-967f-06ca12875ffa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.024437] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507885, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.115665] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507883, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.420666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.223s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.422749] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.651s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.423275] env[69475]: DEBUG nova.objects.instance [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'resources' on Instance uuid fa2ca135-3cd2-411e-b1fc-35b93a97e75d {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 721.440019] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.466428] env[69475]: INFO nova.scheduler.client.report [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Deleted allocations for instance 8f65d893-d2e2-452f-8870-f72ec036f16a [ 721.530696] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507885, 'name': Rename_Task, 'duration_secs': 0.303334} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.531195] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 721.531628] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-477d942d-745f-47fd-9009-1f0ce9e624ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.548414] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.552549] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 721.552549] env[69475]: value = "task-3507886" [ 721.552549] env[69475]: _type = "Task" [ 721.552549] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.565610] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507886, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.607970] env[69475]: DEBUG nova.compute.manager [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-plugged-c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 721.608292] env[69475]: DEBUG oslo_concurrency.lockutils [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.608583] env[69475]: DEBUG oslo_concurrency.lockutils [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.608815] env[69475]: DEBUG oslo_concurrency.lockutils [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.609075] env[69475]: DEBUG nova.compute.manager [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] No waiting events found dispatching network-vif-plugged-c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 721.609322] env[69475]: WARNING nova.compute.manager [req-d12c8c89-c27c-4756-81ba-a20a9051cb98 req-ec27adf7-4c21-4dc5-a814-6d3efedf2fe7 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received unexpected event network-vif-plugged-c128b3a3-0907-4414-9416-ff89769ff3b1 for instance with vm_state building and task_state spawning. [ 721.619063] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507883, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.746864} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 721.619415] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b41845c6-46bd-4b3b-ab26-d7d2dad08f84/b41845c6-46bd-4b3b-ab26-d7d2dad08f84.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 721.619716] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 721.620066] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7a74e43-f39d-47d9-8eb9-7e4a2c00506c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.634023] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 721.634023] env[69475]: value = "task-3507887" [ 721.634023] env[69475]: _type = "Task" [ 721.634023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 721.648888] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507887, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.692625] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully updated port: c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 721.944009] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 721.983020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13165910-670f-4e0b-98e8-3dec814f5fac tempest-FloatingIPsAssociationNegativeTestJSON-687438492 tempest-FloatingIPsAssociationNegativeTestJSON-687438492-project-member] Lock "8f65d893-d2e2-452f-8870-f72ec036f16a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.533s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.066563] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507886, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.142760] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.098486} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.143903] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 722.144184] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6dfafff-d6b9-4a09-b0ab-f2d516f53cc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.175359] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] b41845c6-46bd-4b3b-ab26-d7d2dad08f84/b41845c6-46bd-4b3b-ab26-d7d2dad08f84.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 722.178768] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd91df6f-6ca4-4644-bb16-4334eeaa85e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.205191] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 722.205191] env[69475]: value = "task-3507888" [ 722.205191] env[69475]: _type = "Task" [ 722.205191] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.222053] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507888, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.446181] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.570086] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507886, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.687419] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c229912e-c58a-400d-aa89-86e4f0de5057 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.698541] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d2f6c5-4397-41a6-8275-a8b9a3274e65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.741760] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d395702-07e0-4382-a5d8-0166c25e4afd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.748384] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507888, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.758360] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24fe679-08e5-4e46-b6cf-e3110b7dcdd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.775642] env[69475]: DEBUG nova.compute.provider_tree [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.943618] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.067694] env[69475]: DEBUG oslo_vmware.api [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3507886, 'name': PowerOnVM_Task, 'duration_secs': 1.477645} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.068084] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 723.068367] env[69475]: INFO nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Took 10.10 seconds to spawn the instance on the hypervisor. [ 723.068637] env[69475]: DEBUG nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.069608] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6326cc18-20dc-42b4-8803-ebbe92d063ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.218080] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507888, 'name': ReconfigVM_Task, 'duration_secs': 0.846136} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.218694] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Reconfigured VM instance instance-00000024 to attach disk [datastore1] b41845c6-46bd-4b3b-ab26-d7d2dad08f84/b41845c6-46bd-4b3b-ab26-d7d2dad08f84.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 723.219417] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0ac4bb7-1ad3-4d0b-9df6-247eb196ea92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.227864] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 723.227864] env[69475]: value = "task-3507889" [ 723.227864] env[69475]: _type = "Task" [ 723.227864] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.237565] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507889, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.281420] env[69475]: DEBUG nova.scheduler.client.report [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 723.301089] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "ed12921f-9be8-474d-958e-79dd16b8116e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.301385] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.301605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.301812] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.302062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
723.304421] env[69475]: INFO nova.compute.manager [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Terminating instance [ 723.441675] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.595400] env[69475]: INFO nova.compute.manager [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Took 46.84 seconds to build instance. [ 723.738959] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507889, 'name': Rename_Task, 'duration_secs': 0.186073} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.738959] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 723.738959] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78269ef5-8c14-44ce-9ea5-3da06d9bc74e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.750129] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 723.750129] env[69475]: value = "task-3507890" [ 723.750129] env[69475]: _type = "Task" [ 723.750129] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.759342] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507890, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.788362] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.365s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.792291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.325s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.793730] env[69475]: INFO nova.compute.claims [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.812022] env[69475]: DEBUG nova.compute.manager [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 723.812022] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.812022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89ab4327-a9cc-425a-bc6d-bae86d906b8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.822020] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 723.822644] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4e8b6ad-3550-440e-9e13-393b6311c17c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.826727] env[69475]: INFO nova.scheduler.client.report [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance fa2ca135-3cd2-411e-b1fc-35b93a97e75d [ 723.835140] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 723.835140] env[69475]: value = "task-3507891" [ 723.835140] env[69475]: _type = "Task" [ 723.835140] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.844454] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.946874] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507884, 'name': CloneVM_Task, 'duration_secs': 2.58073} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.947454] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Created linked-clone VM from snapshot [ 723.952765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4872ebf5-5c7d-4777-b183-d7235504b829 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.960553] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Uploading image 609f948e-c80f-49cc-9305-9d24cccf9d02 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 723.998348] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 723.998348] env[69475]: value = "vm-700940" [ 723.998348] env[69475]: _type = "VirtualMachine" [ 723.998348] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 724.000644] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-02b74e4a-3c75-4e6b-b623-d21efc24ec59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.006733] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lease: (returnval){ [ 724.006733] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f71e19-b53c-d715-bbc6-a8487266b748" [ 724.006733] env[69475]: _type = "HttpNfcLease" [ 724.006733] env[69475]: } obtained for exporting VM: (result){ [ 724.006733] env[69475]: value = "vm-700940" [ 724.006733] env[69475]: _type = "VirtualMachine" [ 724.006733] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 724.007172] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the lease: (returnval){ [ 724.007172] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f71e19-b53c-d715-bbc6-a8487266b748" [ 724.007172] env[69475]: _type = "HttpNfcLease" [ 724.007172] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 724.018766] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 724.018766] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f71e19-b53c-d715-bbc6-a8487266b748" [ 724.018766] env[69475]: _type = "HttpNfcLease" [ 724.018766] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 724.098073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-edc0ef5d-5fb9-43ec-a6c8-2ceb2d90c617 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.854s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.238185] env[69475]: DEBUG nova.compute.manager [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-changed-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 724.238263] env[69475]: DEBUG nova.compute.manager [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing instance network info cache due to event network-changed-dd8084ea-8138-439f-a367-0e57562094f5. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 724.238549] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 724.238760] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 724.239015] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing network info cache for port dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 724.261210] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507890, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.345561] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f9a00987-e7e8-4ca6-8b89-7978a548b815 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "fa2ca135-3cd2-411e-b1fc-35b93a97e75d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.027s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.351109] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507891, 'name': PowerOffVM_Task, 'duration_secs': 0.249413} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.351701] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 724.351701] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 724.351930] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-584c1d88-6244-428b-8869-dbfbdf7854d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.415256] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 724.415646] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 724.415781] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore1] ed12921f-9be8-474d-958e-79dd16b8116e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 724.416185] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-182e7255-e7e2-4020-8661-d52b10c83ff5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.431362] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 724.431362] env[69475]: value = "task-3507894" [ 724.431362] env[69475]: _type = "Task" [ 724.431362] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.447622] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507894, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.509487] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 724.509719] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.517043] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 724.517043] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f71e19-b53c-d715-bbc6-a8487266b748" [ 724.517043] env[69475]: _type = "HttpNfcLease" [ 724.517043] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 724.517356] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 724.517356] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f71e19-b53c-d715-bbc6-a8487266b748" [ 724.517356] env[69475]: _type = "HttpNfcLease" [ 724.517356] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 724.518305] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-289f9be4-aa82-4508-8be1-28f114f56d33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.525516] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 724.525688] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 724.604964] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 724.628154] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5fc27287-1e49-4614-82ee-49e5c4991988 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.682442] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully updated port: 2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 724.765873] env[69475]: DEBUG oslo_vmware.api [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3507890, 'name': PowerOnVM_Task, 'duration_secs': 0.742127} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.766405] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 724.766645] env[69475]: INFO nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Took 9.12 seconds to spawn the instance on the hypervisor. [ 724.766966] env[69475]: DEBUG nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 724.768201] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034fd664-c6d9-4322-a3b2-6b9142906968 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.954532] env[69475]: DEBUG oslo_vmware.api [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3507894, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160259} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.958323] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.958323] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 724.958323] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.958323] env[69475]: INFO nova.compute.manager [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Took 1.15 seconds to destroy the instance on the hypervisor. [ 724.958323] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.958851] env[69475]: DEBUG nova.compute.manager [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 724.958851] env[69475]: DEBUG nova.network.neutron [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.142888] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 725.298156] env[69475]: INFO nova.compute.manager [None req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Took 47.44 seconds to build instance. 
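The PowerOnVM_Task (task-3507890) and DeleteDatastoreFile_Task (task-3507894) completions above, like the HttpNfcLease readiness earlier, come from oslo.vmware's task-polling loop. A minimal sketch of that pattern, assuming the standard oslo_vmware.api.VMwareAPISession interface; the endpoint, credentials, helper names, and vm_ref below are placeholders for illustration, not values or code taken from this deployment:

from oslo_vmware import api as vmware_api

def make_session():
    # Placeholder endpoint and credentials -- not values from this environment.
    return vmware_api.VMwareAPISession(
        'vcenter.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

def power_on(session, vm_ref):
    # invoke_api() issues the SOAP call; wait_for_task() then polls the returned
    # Task object until vCenter reports success or error, which is what produces
    # the "Task: {'id': task-..., 'name': PowerOnVM_Task, ...} completed
    # successfully" entries in this trace.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
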
[ 725.479784] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-744515e4-189f-4d6b-b89c-77ba8b3ebb6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.488771] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ca6bae-2e31-4983-8576-457c173f0359 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.522797] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7a23b6d-974d-4942-9374-c5ee8b4fb9d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.528163] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updated VIF entry in instance network info cache for port dd8084ea-8138-439f-a367-0e57562094f5. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 725.528504] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.535198] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad44dd57-5eca-40b5-8a70-44bb110417ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.547666] env[69475]: DEBUG nova.compute.provider_tree [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.802778] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-97289a1c-f908-4df1-be33-75b1719b9cdf tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.231s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.034118] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 726.034338] env[69475]: DEBUG nova.compute.manager [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-changed-c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.034603] env[69475]: DEBUG nova.compute.manager [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing instance network info cache due to event network-changed-c128b3a3-0907-4414-9416-ff89769ff3b1. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 726.034714] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Acquiring lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 726.035019] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Acquired lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 726.035019] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing network info cache for port c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 726.050654] env[69475]: DEBUG nova.scheduler.client.report [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.165104] env[69475]: DEBUG nova.network.neutron [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.309136] env[69475]: 
DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 726.420222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.420813] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.450259] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.451216] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.557418] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.766s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.557969] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 726.561277] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.989s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.565023] env[69475]: DEBUG nova.objects.instance [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lazy-loading 'resources' on Instance uuid b87cac84-ea70-428b-872e-4f6145e36b39 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.650563] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 726.669891] env[69475]: INFO nova.compute.manager [-] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Took 1.71 seconds to deallocate network for instance. [ 726.838958] env[69475]: DEBUG nova.network.neutron [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 726.840081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.843981] env[69475]: DEBUG nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-plugged-2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.844412] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.844773] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.845313] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.845702] env[69475]: DEBUG nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] No waiting events found dispatching network-vif-plugged-2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 726.846109] env[69475]: WARNING nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received unexpected event network-vif-plugged-2d65f58d-2734-4d3f-b996-8feba5cd4ad2 for instance with vm_state building and task_state spawning. [ 726.846495] env[69475]: DEBUG nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-changed-2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 726.847014] env[69475]: DEBUG nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing instance network info cache due to event network-changed-2d65f58d-2734-4d3f-b996-8feba5cd4ad2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 726.847424] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Acquiring lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.064853] env[69475]: DEBUG nova.compute.utils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 727.072641] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 727.072641] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 727.176322] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.350178] env[69475]: DEBUG oslo_concurrency.lockutils [req-3887ccac-7b00-44d1-9ab0-8faeef0e344c req-110b55ef-1648-438b-9a34-bf90d09f2a62 service nova] Releasing lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 727.350178] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Acquired lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 727.350178] env[69475]: DEBUG nova.network.neutron [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing network info cache for port 2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 727.389796] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Successfully updated port: 117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.415803] env[69475]: DEBUG nova.policy [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8af3406769534139b22fd8533f466b9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2515a56625d644e684f43a8dfd230ea6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 727.573046] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 727.698699] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ff52f3-2bb7-4731-8512-d68276ddebe7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.709392] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4957a5b-a995-4301-b195-2b557c182cd7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.745295] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d970441-f6c0-47de-a461-5a1433ace882 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.757080] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c25e91fe-2419-4d07-b5ed-82c7d69bd0f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.779898] env[69475]: DEBUG nova.compute.provider_tree [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.784942] env[69475]: DEBUG nova.compute.manager [req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-plugged-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.785585] env[69475]: DEBUG oslo_concurrency.lockutils [req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.785878] env[69475]: DEBUG oslo_concurrency.lockutils [req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.786183] env[69475]: DEBUG oslo_concurrency.lockutils [req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.786546] env[69475]: DEBUG nova.compute.manager [req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] No waiting events found dispatching network-vif-plugged-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.786726] env[69475]: WARNING nova.compute.manager 
[req-94d6def9-83fb-4a3a-9667-f58bd8f84a15 req-7271d0e0-0f53-4e0b-b891-a59f66c08baa service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received unexpected event network-vif-plugged-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 for instance with vm_state building and task_state spawning. [ 727.892401] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 727.900443] env[69475]: DEBUG nova.network.neutron [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 727.998352] env[69475]: DEBUG nova.network.neutron [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 728.288905] env[69475]: DEBUG nova.scheduler.client.report [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.340949] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Successfully created port: 8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 728.502037] env[69475]: DEBUG oslo_concurrency.lockutils [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] Releasing lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 728.502037] env[69475]: DEBUG nova.compute.manager [req-dc61aaf6-edfc-4e43-9d76-f35dd5e79505 req-da0d3802-aa22-48d2-9cc3-819a99108d18 service nova] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Received event network-vif-deleted-2cb85199-de39-4837-a34d-c8ae33659f9b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 728.502434] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.502591] 
env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.586281] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 728.612404] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 728.612958] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 728.612958] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 728.612958] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 728.613126] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 728.613266] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 728.613474] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 
tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 728.613790] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 728.613939] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 728.614115] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 728.614304] env[69475]: DEBUG nova.virt.hardware [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 728.615195] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d01e67c-53a0-490c-8305-2f7546751193 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.628170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c76195-014b-4e4c-b4b2-07a12fc4be0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.795888] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.234s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.803244] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 34.676s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.846954] env[69475]: INFO nova.scheduler.client.report [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Deleted allocations for instance b87cac84-ea70-428b-872e-4f6145e36b39 [ 729.056390] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance cache missing network 
info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 729.362601] env[69475]: DEBUG oslo_concurrency.lockutils [None req-709c3e8c-5f1f-464c-9a79-8af5656a392a tempest-ServerDiagnosticsV248Test-799474206 tempest-ServerDiagnosticsV248Test-799474206-project-member] Lock "b87cac84-ea70-428b-872e-4f6145e36b39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.214s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.571842] env[69475]: DEBUG nova.network.neutron [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [{"id": "c128b3a3-0907-4414-9416-ff89769ff3b1", "address": "fa:16:3e:f5:a4:96", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc128b3a3-09", "ovs_interfaceid": "c128b3a3-0907-4414-9416-ff89769ff3b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "address": "fa:16:3e:f3:ea:24", "network": {"id": "da216861-2267-42f5-ba81-aa7ee2dd0934", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415419905", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d65f58d-27", "ovs_interfaceid": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", "address": "fa:16:3e:1a:04:3e", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, 
"floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap117bc8d4-1b", "ovs_interfaceid": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.734543] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 729.734797] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4465f156-09cc-4eba-90e4-be76f3010363 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 7be48799-ea4a-4e7f-95c2-637460596cfc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 25c44ae0-4193-4833-85ec-ebc0ef3cf593 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b255f4d7-b177-4d6c-8a28-dcb5a179c1c0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 93607154-f135-4925-9c3a-a97051535b00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 235653ac-a893-4f42-a394-dd81f61f0d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance a21ec73a-2658-4fc6-9bc1-0e492385d59e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d1a316d5-59ef-4286-9d7e-a444ffadc49d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 3149cd80-503c-42e4-ac91-54aababe84e3 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 9e2d4d61-71ed-447a-b28e-c29c5bd8d763 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e48e2cc1-7d60-457f-8f1c-649f0dda8cdb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4c2e12bf-3f16-47de-a604-44b62a6c7137 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8fbabf86-be9e-47ec-8c4c-adea4c68abe8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 6f530b86-2ed1-41db-929c-8a5dd61d931a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865055] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance ed12921f-9be8-474d-958e-79dd16b8116e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 729.865621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d1e5e08d-b41a-4655-997d-91fbd3581f00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 00ba5cd8-3516-4059-bcda-c2d01e165e07 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b41845c6-46bd-4b3b-ab26-d7d2dad08f84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2dd98ffd-b0e6-4447-9c82-57713dc37abd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 729.865621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 730.033242] env[69475]: DEBUG nova.compute.manager [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-changed-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.033678] env[69475]: DEBUG nova.compute.manager [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing instance network info cache due to event network-changed-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 730.034236] env[69475]: DEBUG oslo_concurrency.lockutils [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] Acquiring lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.075134] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.075713] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance network_info: |[{"id": "c128b3a3-0907-4414-9416-ff89769ff3b1", "address": "fa:16:3e:f5:a4:96", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc128b3a3-09", "ovs_interfaceid": "c128b3a3-0907-4414-9416-ff89769ff3b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "address": "fa:16:3e:f3:ea:24", "network": {"id": "da216861-2267-42f5-ba81-aa7ee2dd0934", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415419905", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d65f58d-27", "ovs_interfaceid": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", "address": "fa:16:3e:1a:04:3e", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap117bc8d4-1b", "ovs_interfaceid": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 730.075943] env[69475]: DEBUG oslo_concurrency.lockutils [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] Acquired lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.076060] env[69475]: DEBUG nova.network.neutron [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Refreshing network info cache for port 117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 730.077802] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:a4:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '557aba95-8968-407a-bac2-2fae66f7c8e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c128b3a3-0907-4414-9416-ff89769ff3b1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:ea:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02092ea4-bae0-4e42-b0ab-abc365b4395a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2d65f58d-2734-4d3f-b996-8feba5cd4ad2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:04:3e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '557aba95-8968-407a-bac2-2fae66f7c8e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 730.096294] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Creating folder: Project (0f2c2f5187934f5da108a1c96a3a3125). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.099488] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8fc5c34b-1448-41d7-9f71-3c07ccaab880 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.112520] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Created folder: Project (0f2c2f5187934f5da108a1c96a3a3125) in parent group-v700823. [ 730.112520] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Creating folder: Instances. Parent ref: group-v700941. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 730.112520] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa9f09e1-6277-4a06-999c-034ed1bc8532 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.127600] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Created folder: Instances in parent group-v700941. [ 730.127600] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 730.128979] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 730.129158] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6927e379-7edb-44ff-8324-917af00c4c68 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.165591] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 730.165591] env[69475]: value = "task-3507897" [ 730.165591] env[69475]: _type = "Task" [ 730.165591] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.175543] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507897, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.371036] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b71882d4-537d-4a90-b43d-f8ac4ca0d90c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 730.462667] env[69475]: DEBUG nova.network.neutron [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updated VIF entry in instance network info cache for port 117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.463293] env[69475]: DEBUG nova.network.neutron [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [{"id": "c128b3a3-0907-4414-9416-ff89769ff3b1", "address": "fa:16:3e:f5:a4:96", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc128b3a3-09", "ovs_interfaceid": "c128b3a3-0907-4414-9416-ff89769ff3b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "address": "fa:16:3e:f3:ea:24", "network": {"id": "da216861-2267-42f5-ba81-aa7ee2dd0934", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415419905", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d65f58d-27", "ovs_interfaceid": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", 
"address": "fa:16:3e:1a:04:3e", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.161", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap117bc8d4-1b", "ovs_interfaceid": "117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.469571] env[69475]: DEBUG nova.compute.manager [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Received event network-vif-plugged-8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 730.469793] env[69475]: DEBUG oslo_concurrency.lockutils [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] Acquiring lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.469967] env[69475]: DEBUG oslo_concurrency.lockutils [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 730.470190] env[69475]: DEBUG oslo_concurrency.lockutils [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 730.470292] env[69475]: DEBUG nova.compute.manager [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] No waiting events found dispatching network-vif-plugged-8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 730.470460] env[69475]: WARNING nova.compute.manager [req-c049a65a-46be-48f7-9d2f-d6aeea45ae65 req-2a9ce659-3a4d-4608-a988-7fcfbada385e service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Received unexpected event network-vif-plugged-8bc8f5fa-c3ec-45d0-bbd5-84002529188f for instance with vm_state building and task_state spawning. 
[ 730.549175] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Successfully updated port: 8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 730.679034] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507897, 'name': CreateVM_Task, 'duration_secs': 0.473749} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.679266] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.680333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.680550] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.680972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.681317] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-349c7a3f-ed62-4f1c-89fd-358eac6465d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.686767] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 730.686767] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f878-c4c5-7d09-4772-66b3244ee8a0" [ 730.686767] env[69475]: _type = "Task" [ 730.686767] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.695697] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f878-c4c5-7d09-4772-66b3244ee8a0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.877288] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4b3b53d1-82bf-40e7-9988-af7b51e9883a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 730.970024] env[69475]: DEBUG oslo_concurrency.lockutils [req-59ac9a7d-81dd-4f32-a23d-99473be48c24 req-43d79de5-8050-4338-b275-9aadd773a108 service nova] Releasing lock "refresh_cache-2dd98ffd-b0e6-4447-9c82-57713dc37abd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.052188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.052358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.052522] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 731.200249] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f878-c4c5-7d09-4772-66b3244ee8a0, 'name': SearchDatastore_Task, 'duration_secs': 0.0143} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.200622] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.200922] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 731.201457] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 731.201457] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 731.201674] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 731.202030] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-231cd180-73fc-498e-8ce2-0901b732d54a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.212141] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 731.212403] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 731.213433] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0e8e58c-9b24-4f8e-9e40-45e02bc5f353 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.219269] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 731.219269] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529014fb-e6f0-1e11-a9ec-8f55f35758a8" [ 731.219269] env[69475]: _type = "Task" [ 731.219269] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.227930] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529014fb-e6f0-1e11-a9ec-8f55f35758a8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.381213] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 3e332e28-5db5-4f04-8a47-95406da16e21 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 731.616653] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 731.731710] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529014fb-e6f0-1e11-a9ec-8f55f35758a8, 'name': SearchDatastore_Task, 'duration_secs': 0.011945} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.732938] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d429eb56-4b8d-4b3f-9cec-59b77c94a1e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.739472] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 731.739472] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a68aa-2e32-e1ec-124e-f4bb441673b0" [ 731.739472] env[69475]: _type = "Task" [ 731.739472] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.752024] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a68aa-2e32-e1ec-124e-f4bb441673b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.878161] env[69475]: DEBUG nova.network.neutron [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Updating instance_info_cache with network_info: [{"id": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "address": "fa:16:3e:97:f8:7f", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc8f5fa-c3", "ovs_interfaceid": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.885905] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 712e93b6-e797-4b9f-b39b-33373cede403 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 732.249998] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a68aa-2e32-e1ec-124e-f4bb441673b0, 'name': SearchDatastore_Task, 'duration_secs': 0.013839} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.250299] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.250586] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2dd98ffd-b0e6-4447-9c82-57713dc37abd/2dd98ffd-b0e6-4447-9c82-57713dc37abd.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 732.250871] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-943ad085-bce4-4505-8749-febc5777d23a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.259829] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 732.259829] env[69475]: value = "task-3507898" [ 732.259829] env[69475]: _type = "Task" [ 732.259829] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.268495] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507898, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.385580] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 732.385958] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Instance network_info: |[{"id": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "address": "fa:16:3e:97:f8:7f", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc8f5fa-c3", "ovs_interfaceid": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 732.387056] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:f8:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bc8f5fa-c3ec-45d0-bbd5-84002529188f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 732.397412] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating folder: Project (2515a56625d644e684f43a8dfd230ea6). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.400943] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 732.401106] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance daef2117-0d9f-4c9e-99e7-1e8a65aa1f22 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 732.402479] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-739d52f0-311a-4b95-99a4-a7ea416630fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.405812] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56e857a-fcd6-4ec2-a43f-2b1b6c91deba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.414314] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 732.414459] env[69475]: ERROR oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk due to incomplete transfer. [ 732.414716] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-741ec285-ad14-4e73-a1d9-e2a8c590f433 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.421873] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Created folder: Project (2515a56625d644e684f43a8dfd230ea6) in parent group-v700823. [ 732.422337] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating folder: Instances. Parent ref: group-v700944. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 732.423993] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-682892b1-46a6-42a5-be04-63c329659876 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.427540] env[69475]: DEBUG oslo_vmware.rw_handles [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/523fd40b-535d-d749-6494-485499b8903f/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 732.428975] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Uploaded image 609f948e-c80f-49cc-9305-9d24cccf9d02 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 732.430192] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 732.430820] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-18cdddac-de12-4ed2-994e-a94d9f158b0d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.433964] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Created folder: Instances in parent group-v700944. [ 732.434242] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 732.434826] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 732.435374] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f3b9ed0a-a1b0-4ca3-9787-ba69537c8b9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.455244] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 732.455244] env[69475]: value = "task-3507901" [ 732.455244] env[69475]: _type = "Task" [ 732.455244] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.459208] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 732.459208] env[69475]: value = "task-3507902" [ 732.459208] env[69475]: _type = "Task" [ 732.459208] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.464975] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507901, 'name': Destroy_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.470813] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507902, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.585816] env[69475]: DEBUG nova.compute.manager [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Received event network-changed-8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 732.585816] env[69475]: DEBUG nova.compute.manager [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Refreshing instance network info cache due to event network-changed-8bc8f5fa-c3ec-45d0-bbd5-84002529188f. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 732.586635] env[69475]: DEBUG oslo_concurrency.lockutils [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] Acquiring lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.586635] env[69475]: DEBUG oslo_concurrency.lockutils [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] Acquired lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 732.586635] env[69475]: DEBUG nova.network.neutron [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Refreshing network info cache for port 8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 732.774767] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507898, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.911083] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance baf27027-678d-4167-bb9b-df410aeb0e82 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 732.967012] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507901, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.972482] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507902, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.276339] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507898, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541363} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.276822] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2dd98ffd-b0e6-4447-9c82-57713dc37abd/2dd98ffd-b0e6-4447-9c82-57713dc37abd.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 733.277794] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 733.278509] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2592ab67-cac0-402c-aeaf-51134edd9a28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.285964] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 733.285964] env[69475]: value = "task-3507903" [ 733.285964] env[69475]: _type = "Task" [ 733.285964] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.295453] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507903, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.413950] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance df73dd41-7455-4482-abb2-b61b26fcf403 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 733.428130] env[69475]: DEBUG nova.network.neutron [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Updated VIF entry in instance network info cache for port 8bc8f5fa-c3ec-45d0-bbd5-84002529188f. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 733.428502] env[69475]: DEBUG nova.network.neutron [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Updating instance_info_cache with network_info: [{"id": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "address": "fa:16:3e:97:f8:7f", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.224", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bc8f5fa-c3", "ovs_interfaceid": "8bc8f5fa-c3ec-45d0-bbd5-84002529188f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.478067] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507902, 'name': CreateVM_Task, 'duration_secs': 0.60798} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.478362] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507901, 'name': Destroy_Task, 'duration_secs': 0.689898} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.478578] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 733.478793] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Destroyed the VM [ 733.479069] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 733.480082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.480249] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.480556] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 733.480782] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7ac3504c-92ae-4307-ba36-da3b81fa1ee7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.485019] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc23dafb-e5a5-4791-a37e-fd67c6d7335c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.487970] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 733.487970] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0e5c-983a-6b41-ca80-7d9092a79292" [ 733.487970] env[69475]: _type = "Task" [ 733.487970] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.493703] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 733.493703] env[69475]: value = "task-3507904" [ 733.493703] env[69475]: _type = "Task" [ 733.493703] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.499497] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0e5c-983a-6b41-ca80-7d9092a79292, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.503774] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.804294] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507903, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070537} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.804554] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.808874] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49210416-a757-4ee2-ae59-341d5374600d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.838778] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Reconfiguring VM instance instance-00000025 to attach disk [datastore2] 2dd98ffd-b0e6-4447-9c82-57713dc37abd/2dd98ffd-b0e6-4447-9c82-57713dc37abd.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.839123] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1292cd14-a0ab-4d48-afe9-29787a031389 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.860979] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 733.860979] env[69475]: value = "task-3507905" [ 733.860979] env[69475]: _type = "Task" [ 733.860979] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.868983] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507905, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.918020] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance a75d7a92-4ac7-4fa0-90f0-f0a0993e881e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 733.930613] env[69475]: DEBUG oslo_concurrency.lockutils [req-39782a20-19c1-457a-821a-5a6537379f12 req-ce49f890-3e48-439b-9cb1-ff1af7257aa0 service nova] Releasing lock "refresh_cache-7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.005277] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0e5c-983a-6b41-ca80-7d9092a79292, 'name': SearchDatastore_Task, 'duration_secs': 0.021515} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.010069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 734.010370] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 734.011057] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.011285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 734.011485] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 734.011785] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507904, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.012008] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e4708de-0be8-41a5-8a4b-5c2376d42f50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.021197] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 734.021529] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 734.022212] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bb8e9f7-f2b9-4f85-9c33-f09f15f2480c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.030344] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 734.030344] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52417aef-9ddf-71be-6cb4-cb1aa63a1e8f" [ 734.030344] env[69475]: _type = "Task" [ 734.030344] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.040664] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52417aef-9ddf-71be-6cb4-cb1aa63a1e8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.371728] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507905, 'name': ReconfigVM_Task, 'duration_secs': 0.336188} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.372012] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Reconfigured VM instance instance-00000025 to attach disk [datastore2] 2dd98ffd-b0e6-4447-9c82-57713dc37abd/2dd98ffd-b0e6-4447-9c82-57713dc37abd.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 734.372708] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c6813ca-d405-4fd4-bc83-ecbd50f558a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.380015] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 734.380015] env[69475]: value = "task-3507906" [ 734.380015] env[69475]: _type = "Task" [ 734.380015] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.392140] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507906, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.423291] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8bea34ef-0caf-4cdb-a689-dd747d9b52ea has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 734.508254] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507904, 'name': RemoveSnapshot_Task} progress is 15%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.547023] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52417aef-9ddf-71be-6cb4-cb1aa63a1e8f, 'name': SearchDatastore_Task, 'duration_secs': 0.01054} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.547023] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccb3b73c-db1a-4436-ac74-17685dec4add {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.555098] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 734.555098] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c52e7a-1287-a9f9-b65f-c356aee46b7a" [ 734.555098] env[69475]: _type = "Task" [ 734.555098] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.562681] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c52e7a-1287-a9f9-b65f-c356aee46b7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.893718] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507906, 'name': Rename_Task, 'duration_secs': 0.177045} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.893841] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.894020] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4535dec2-4c99-46f3-b188-8ac07464b6a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.900517] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 734.900517] env[69475]: value = "task-3507907" [ 734.900517] env[69475]: _type = "Task" [ 734.900517] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.909111] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507907, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.929127] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 3fba85c9-7798-4a66-b335-21f80962e0bd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 735.011042] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507904, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.066221] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c52e7a-1287-a9f9-b65f-c356aee46b7a, 'name': SearchDatastore_Task, 'duration_secs': 0.021076} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.067306] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.067630] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9/7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 735.067938] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ac873150-5573-4121-8cfa-035796ccd3ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.076240] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 735.076240] env[69475]: value = "task-3507908" [ 735.076240] env[69475]: _type = "Task" [ 735.076240] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.084147] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.414289] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507907, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.433263] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 420ecc09-60c8-4a14-8504-d11d760ddbb4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 735.506802] env[69475]: DEBUG oslo_vmware.api [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507904, 'name': RemoveSnapshot_Task, 'duration_secs': 1.524913} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.511034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 735.511034] env[69475]: INFO nova.compute.manager [None req-405c9f65-9f56-4d4d-a1cc-adc566df3816 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 16.73 seconds to snapshot the instance on the hypervisor. [ 735.590324] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489695} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.590827] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9/7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 735.592399] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 735.593304] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3cdb6ff-79d5-43f9-830c-b3c77eb37962 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.600225] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 735.600225] env[69475]: value = "task-3507909" [ 735.600225] env[69475]: _type = "Task" [ 735.600225] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.608307] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.915143] env[69475]: DEBUG oslo_vmware.api [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507907, 'name': PowerOnVM_Task, 'duration_secs': 0.571874} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.915143] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.915143] env[69475]: INFO nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Took 17.01 seconds to spawn the instance on the hypervisor. 
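The entries above repeat one oslo_vmware pattern: a vCenter task is submitted (ReconfigVM_Task, Rename_Task, PowerOnVM_Task), wait_for_task blocks on it, and _poll_task logs "progress is N%" until the task reports success together with its duration_secs. The sketch below approximates that poll-until-done loop for illustration only; fetch_task_info and the state/progress keys are hypothetical stand-ins, not the real oslo.vmware API.

import time

def wait_for_task(fetch_task_info, poll_interval=0.5, timeout=300.0):
    # Poll a vCenter-style task until it reaches a terminal state, mirroring
    # the "progress is N%" lines logged by _poll_task above.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info()                 # one property read per poll
        state = info.get("state")
        if state == "success":
            return info                          # e.g. includes duration_secs
        if state == "error":
            raise RuntimeError(info.get("error", "task failed"))
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0f seconds" % timeout)

# Example run: a fake task that succeeds on the third poll.
states = iter([{"state": "running", "progress": 0},
               {"state": "running", "progress": 94},
               {"state": "success", "duration_secs": 0.57}])
print(wait_for_task(lambda: next(states), poll_interval=0.01))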
[ 735.915143] env[69475]: DEBUG nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 735.915706] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a9476c-849c-4d8d-acd6-b8893c0498b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.941603] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 41c23568-c8d7-4d6c-8cc4-a94c95b3223a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 736.110415] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070306} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.110713] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 736.111753] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d8d72f1-3710-4b95-b5be-f40ba1bbb02d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.138116] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Reconfiguring VM instance instance-00000026 to attach disk [datastore2] 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9/7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 736.138412] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15b3cb80-f401-4378-b8de-d8d75e579ad3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.158482] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 736.158482] env[69475]: value = "task-3507910" [ 736.158482] env[69475]: _type = "Task" [ 736.158482] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.168231] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.433908] env[69475]: INFO nova.compute.manager [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Took 48.56 seconds to build instance. [ 736.444761] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 736.670555] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507910, 'name': ReconfigVM_Task, 'duration_secs': 0.286418} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.670961] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Reconfigured VM instance instance-00000026 to attach disk [datastore2] 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9/7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 736.671718] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5f2f5e72-3039-45df-9714-5ada5a177867 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.678741] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 736.678741] env[69475]: value = "task-3507911" [ 736.678741] env[69475]: _type = "Task" [ 736.678741] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.689336] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507911, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.935901] env[69475]: DEBUG oslo_concurrency.lockutils [None req-840b2971-f139-4767-b2c5-8416e16b5419 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.729s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.947717] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2e7066ca-162e-4465-a9c1-5422510e4c0f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 737.006409] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.006633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.006870] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.007040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.007235] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.009310] env[69475]: INFO nova.compute.manager [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 
2dd98ffd-b0e6-4447-9c82-57713dc37abd] Terminating instance [ 737.190519] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507911, 'name': Rename_Task, 'duration_secs': 0.133876} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.190794] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 737.191057] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ccdebd60-1945-4802-9cd9-83e97be2ee67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.199405] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 737.199405] env[69475]: value = "task-3507912" [ 737.199405] env[69475]: _type = "Task" [ 737.199405] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.207110] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507912, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.444021] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.452416] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 86464a01-e034-43b6-a6d5-45f9e3b6715b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 737.520110] env[69475]: DEBUG nova.compute.manager [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 737.520110] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 737.520110] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033b3eb9-c85b-4f76-8b1c-5120dc532478 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.528179] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 737.528620] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c5d27a1-4b3a-4e07-a385-ff3b8e6d6a7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.536756] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 737.536756] env[69475]: value = "task-3507916" [ 737.536756] env[69475]: _type = "Task" [ 737.536756] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.547327] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507916, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.617925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.618571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.619138] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.619325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.619534] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.621831] env[69475]: INFO nova.compute.manager [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Terminating instance [ 737.709834] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507912, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.960202] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2b0cc71c-862e-4eb0-afc4-b2125003b087 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 737.972256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.051221] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507916, 'name': PowerOffVM_Task, 'duration_secs': 0.262139} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.052061] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.052373] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.052703] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-74f6f36d-37fc-4e4b-9595-8f16397bd113 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.127359] env[69475]: DEBUG nova.compute.manager [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.132033] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.133145] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af335c2a-24e7-4b4c-ad11-ab4daa608e27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.140775] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 738.141287] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3127110c-505f-45cd-8789-c4669cb57d89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.150999] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 738.150999] env[69475]: value = "task-3507918" [ 738.150999] env[69475]: _type = "Task" [ 738.150999] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.157020] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507918, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.180075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.180075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.180075] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleting the datastore file [datastore2] 2dd98ffd-b0e6-4447-9c82-57713dc37abd {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.180075] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e582c7e-6157-4f6c-a8df-1a60cab0929c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.185259] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 738.185259] env[69475]: value = "task-3507919" [ 738.185259] env[69475]: _type = "Task" [ 738.185259] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.196425] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.209286] env[69475]: DEBUG oslo_vmware.api [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507912, 'name': PowerOnVM_Task, 'duration_secs': 0.553294} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.209600] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 738.209839] env[69475]: INFO nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 9.62 seconds to spawn the instance on the hypervisor. 
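Interleaved with the spawn that just finished, the surrounding entries trace the teardown of instance 2dd98ffd-b0e6-4447-9c82-57713dc37abd: PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task on its [datastore2] directory, and finally Neutron port deallocation. The sketch below only restates that ordering under stated assumptions; VCenterStub and its methods are hypothetical placeholders, not Nova's vmops/ds_util code.

class VCenterStub:
    # Hypothetical stand-in for the vCenter calls seen in the surrounding log.
    def power_off_vm(self, vm_name):
        print("PowerOffVM_Task ->", vm_name)

    def unregister_vm(self, vm_name):
        print("UnregisterVM ->", vm_name)

    def delete_datastore_file(self, path):
        print("DeleteDatastoreFile_Task ->", path)


def destroy_instance(vc, instance_uuid, datastore="datastore2"):
    # Same ordering as the log: power off, unregister, delete the files, and
    # only then does the compute manager deallocate the instance's ports.
    vc.power_off_vm(instance_uuid)
    vc.unregister_vm(instance_uuid)
    vc.delete_datastore_file("[%s] %s" % (datastore, instance_uuid))


destroy_instance(VCenterStub(), "2dd98ffd-b0e6-4447-9c82-57713dc37abd")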
[ 738.210123] env[69475]: DEBUG nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 738.215793] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63a9a89-16fa-4e98-9051-2286e0ddf08a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.464283] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e960f967-d693-4ea8-9390-8b0232941c58 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 738.657902] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507918, 'name': PowerOffVM_Task, 'duration_secs': 0.258966} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.658418] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 738.658607] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.658876] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21b2fe8c-3e9a-46b3-b4dd-91d5e99e64ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.694911] env[69475]: DEBUG oslo_vmware.api [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3507919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227462} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.695313] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 738.695620] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 738.695933] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 738.696260] env[69475]: INFO nova.compute.manager [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Took 1.18 seconds to destroy the instance on the hypervisor. [ 738.696606] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 738.696798] env[69475]: DEBUG nova.compute.manager [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 738.696893] env[69475]: DEBUG nova.network.neutron [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 738.719879] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.720233] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.720435] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Deleting the datastore file [datastore2] 4c2e12bf-3f16-47de-a604-44b62a6c7137 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.720703] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-c474bdb5-9bb8-4f66-915e-762eddfec0a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.732374] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for the task: (returnval){ [ 738.732374] env[69475]: value = "task-3507921" [ 738.732374] env[69475]: _type = "Task" [ 738.732374] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.742106] env[69475]: INFO nova.compute.manager [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 47.30 seconds to build instance. [ 738.753330] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507921, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.967980] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 5e3e57c5-8367-493f-8268-a0e496c8c878 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 738.968356] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 738.968528] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 739.164687] env[69475]: DEBUG nova.compute.manager [req-e550d8e4-6ba5-4a71-9670-4de6c4eb20f0 req-8dc7fe67-3b8a-4afa-a16d-c512f2b582a3 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-deleted-117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.164687] env[69475]: INFO nova.compute.manager [req-e550d8e4-6ba5-4a71-9670-4de6c4eb20f0 req-8dc7fe67-3b8a-4afa-a16d-c512f2b582a3 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Neutron deleted interface 117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03; detaching it from the instance and deleting it from the info cache [ 739.164687] env[69475]: DEBUG nova.network.neutron [req-e550d8e4-6ba5-4a71-9670-4de6c4eb20f0 req-8dc7fe67-3b8a-4afa-a16d-c512f2b582a3 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [{"id": "c128b3a3-0907-4414-9416-ff89769ff3b1", "address": "fa:16:3e:f5:a4:96", "network": {"id": "6111d6ef-d4a6-4577-912d-cf402de0ea2a", "bridge": "br-int", 
"label": "tempest-ServersTestMultiNic-54744973", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "557aba95-8968-407a-bac2-2fae66f7c8e5", "external-id": "nsx-vlan-transportzone-45", "segmentation_id": 45, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc128b3a3-09", "ovs_interfaceid": "c128b3a3-0907-4414-9416-ff89769ff3b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "address": "fa:16:3e:f3:ea:24", "network": {"id": "da216861-2267-42f5-ba81-aa7ee2dd0934", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1415419905", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.121", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02092ea4-bae0-4e42-b0ab-abc365b4395a", "external-id": "nsx-vlan-transportzone-718", "segmentation_id": 718, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2d65f58d-27", "ovs_interfaceid": "2d65f58d-2734-4d3f-b996-8feba5cd4ad2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.250239] env[69475]: DEBUG oslo_vmware.api [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Task: {'id': task-3507921, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.202278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.250663] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.250976] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.251336] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.251645] env[69475]: INFO nova.compute.manager [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 1.12 seconds to destroy the instance on the hypervisor. [ 739.252035] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.252365] env[69475]: DEBUG nova.compute.manager [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.252579] env[69475]: DEBUG nova.network.neutron [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.254583] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dfbbc96c-612c-43f8-a1fa-5d6bb66f3774 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.451s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.593653] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b6dd9e1-ac0f-42ec-a741-3a9d8a857c51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.604676] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cc9dd3f-6ed5-4df9-874f-c8440f1c5db1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.638865] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804a95d8-e269-433d-809e-fce33fbc4a37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.645912] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e6ec34d-86f9-478e-a48e-d4d3de36a744 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.660434] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.666629] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-faea47f3-022d-4b5e-866e-07ce3e1258ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.676637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-471e6ed5-a146-441d-a8c5-3178366f8c24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.715330] env[69475]: DEBUG nova.compute.manager [req-e550d8e4-6ba5-4a71-9670-4de6c4eb20f0 req-8dc7fe67-3b8a-4afa-a16d-c512f2b582a3 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Detach interface failed, port_id=117bc8d4-1b9e-4bd2-b86e-e4226d5aeb03, reason: Instance 2dd98ffd-b0e6-4447-9c82-57713dc37abd could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 739.759703] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 739.904191] env[69475]: DEBUG nova.compute.manager [req-719a0e32-b3d0-49ce-8e78-5700df93591d req-ca069284-5ba2-43e2-9ab1-a70d62990b0e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Received event network-vif-deleted-c1fc8b83-7009-439e-b6cd-c8b86d680b84 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.904408] env[69475]: INFO nova.compute.manager [req-719a0e32-b3d0-49ce-8e78-5700df93591d req-ca069284-5ba2-43e2-9ab1-a70d62990b0e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Neutron deleted interface c1fc8b83-7009-439e-b6cd-c8b86d680b84; detaching it from the instance and deleting it from the info cache [ 739.904584] env[69475]: DEBUG nova.network.neutron [req-719a0e32-b3d0-49ce-8e78-5700df93591d req-ca069284-5ba2-43e2-9ab1-a70d62990b0e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.165450] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 740.234444] env[69475]: DEBUG nova.network.neutron [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.290922] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 740.407323] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0914bceb-33fd-49af-b135-3904434054ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.418450] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ad4ebde-3e9a-4774-96df-9bf43334740a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.450284] env[69475]: DEBUG nova.compute.manager [req-719a0e32-b3d0-49ce-8e78-5700df93591d req-ca069284-5ba2-43e2-9ab1-a70d62990b0e service nova] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Detach 
interface failed, port_id=c1fc8b83-7009-439e-b6cd-c8b86d680b84, reason: Instance 4c2e12bf-3f16-47de-a604-44b62a6c7137 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 740.499782] env[69475]: DEBUG nova.network.neutron [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.672269] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 740.672485] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.872s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.672998] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 46.029s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.672998] env[69475]: DEBUG nova.objects.instance [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 740.737032] env[69475]: INFO nova.compute.manager [-] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Took 1.48 seconds to deallocate network for instance. [ 741.005072] env[69475]: INFO nova.compute.manager [-] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Took 2.31 seconds to deallocate network for instance. 
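Editor's note on the "Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da" records in this section: they carry the full placement inventory payload for the compute node. A minimal sketch follows (not nova code; the helper name is made up for illustration) of how such a payload maps to schedulable capacity, assuming the usual placement semantics of capacity = (total - reserved) * allocation_ratio; the numbers are copied from the inventory data logged here.

    # Resource-class figures copied from the inventory data in the log records above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def effective_capacity(inv):
        # Capacity a scheduler could place allocations against, per resource class,
        # assuming capacity = (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(effective_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
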
[ 741.200039] env[69475]: DEBUG nova.compute.manager [req-815bd76b-a086-457f-b090-d89457d41e1c req-6632298f-522a-4e5a-a6c0-e73a12bd9256 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-deleted-c128b3a3-0907-4414-9416-ff89769ff3b1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 741.200375] env[69475]: DEBUG nova.compute.manager [req-815bd76b-a086-457f-b090-d89457d41e1c req-6632298f-522a-4e5a-a6c0-e73a12bd9256 service nova] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Received event network-vif-deleted-2d65f58d-2734-4d3f-b996-8feba5cd4ad2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 741.244391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.266906] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "8d50b322-fa03-4e48-b74b-a63578e4701c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.267177] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.512378] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.683732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aec23e2d-c16f-4c07-9af8-e3ff5187fc38 tempest-ServersAdmin275Test-1434252538 tempest-ServersAdmin275Test-1434252538-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.684948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 45.494s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.686452] env[69475]: INFO nova.compute.claims [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 
b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.187918] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271102dc-b20e-408d-a5ae-25d0fdbc4225 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.195960] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f822a673-8d1b-41ca-8f97-0f02e544e872 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.227337] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ffddf97-1253-4ec9-b857-a7dc6ef12834 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.235253] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbdfc06-045d-498d-aa5c-df05d3258c19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.248603] env[69475]: DEBUG nova.compute.provider_tree [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 743.752034] env[69475]: DEBUG nova.scheduler.client.report [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 744.258023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.572s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.258023] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 744.260406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.023s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.260597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.264068] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 44.807s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.265015] env[69475]: INFO nova.compute.claims [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.290568] env[69475]: INFO nova.scheduler.client.report [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Deleted allocations for instance 3149cd80-503c-42e4-ac91-54aababe84e3 [ 744.773234] env[69475]: DEBUG nova.compute.utils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 744.774621] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 744.774791] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 744.798143] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7c775132-29b6-4e07-a379-09a14138b666 tempest-ServersAdmin275Test-139972756 tempest-ServersAdmin275Test-139972756-project-member] Lock "3149cd80-503c-42e4-ac91-54aababe84e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.792s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.835264] env[69475]: DEBUG nova.policy [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8af3406769534139b22fd8533f466b9e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2515a56625d644e684f43a8dfd230ea6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.249082] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Successfully created port: f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.280985] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 745.892698] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a166469-7967-425c-bbb9-41549768326c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.900207] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76196968-e3a2-4cd7-a0c2-c03851f27a8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.929992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8074f5e7-1c4a-433b-8dcc-e7d9ab619b4a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.937203] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4743b6d-6a83-41eb-933d-cf11abbc793b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.950171] env[69475]: DEBUG nova.compute.provider_tree [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 746.303808] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 746.340382] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 746.340782] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 746.341058] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 746.341451] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 746.341615] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 746.341874] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 746.342230] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 746.342503] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 746.342788] env[69475]: DEBUG 
nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 746.343091] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 746.343402] env[69475]: DEBUG nova.virt.hardware [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 746.344757] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e25bbb5-ba23-40d3-b42c-3e495d5f7b28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.357540] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276fae3d-e8cf-4862-a7ce-0741009eb248 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.452988] env[69475]: DEBUG nova.scheduler.client.report [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.898917] env[69475]: DEBUG nova.compute.manager [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Received event network-vif-plugged-f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 746.899229] env[69475]: DEBUG oslo_concurrency.lockutils [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] Acquiring lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 746.899506] env[69475]: DEBUG oslo_concurrency.lockutils [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.899710] env[69475]: DEBUG oslo_concurrency.lockutils [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 
req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.899926] env[69475]: DEBUG nova.compute.manager [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] No waiting events found dispatching network-vif-plugged-f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 746.900239] env[69475]: WARNING nova.compute.manager [req-64eb2196-d14c-4d03-b2f3-b40d9278de46 req-2a3d051d-c3dd-45ce-b1ab-2162f21f05a8 service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Received unexpected event network-vif-plugged-f46198f7-e2cd-4d21-8b63-33c585b37c57 for instance with vm_state building and task_state spawning. [ 746.957954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.695s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.958472] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.965024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.527s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.965024] env[69475]: INFO nova.compute.claims [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 747.072463] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Successfully updated port: f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.470452] env[69475]: DEBUG nova.compute.utils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 747.472231] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Allocating IP information in the 
background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.472231] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.537802] env[69475]: DEBUG nova.policy [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cadefdf967f4ef1b0c24f7bb0b7d6d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dde7ecd407ae48f6a5d1b791df065d6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 747.574234] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.575043] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.575043] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 747.976267] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 748.057506] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Successfully created port: fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 748.114827] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.277759] env[69475]: DEBUG nova.network.neutron [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Updating instance_info_cache with network_info: [{"id": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "address": "fa:16:3e:db:21:da", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf46198f7-e2", "ovs_interfaceid": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.531483] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5763290-c278-45ca-9358-c288fb0130e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.539951] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d00d2c-95a8-49cd-95e1-73c8b8e99e9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.574991] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7081368-49ac-460e-a6da-25a555dac5d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.583129] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2dfa86-25bf-4727-a147-b81ca11fe63a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.597747] env[69475]: DEBUG nova.compute.provider_tree [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.780060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 748.781027] env[69475]: DEBUG nova.compute.manager [None 
req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Instance network_info: |[{"id": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "address": "fa:16:3e:db:21:da", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf46198f7-e2", "ovs_interfaceid": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 748.781521] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:21:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f46198f7-e2cd-4d21-8b63-33c585b37c57', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 748.789394] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 748.789541] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 748.789745] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6df3080b-9cdb-4ad5-b907-a03d9ee6cbed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.812257] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 748.812257] env[69475]: value = "task-3507927" [ 748.812257] env[69475]: _type = "Task" [ 748.812257] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.822913] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507927, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.945409] env[69475]: DEBUG nova.compute.manager [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Received event network-changed-f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 748.945609] env[69475]: DEBUG nova.compute.manager [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Refreshing instance network info cache due to event network-changed-f46198f7-e2cd-4d21-8b63-33c585b37c57. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 748.945820] env[69475]: DEBUG oslo_concurrency.lockutils [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] Acquiring lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.945983] env[69475]: DEBUG oslo_concurrency.lockutils [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] Acquired lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 748.946123] env[69475]: DEBUG nova.network.neutron [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Refreshing network info cache for port f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 748.990582] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 749.018573] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 749.018855] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 749.019028] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 749.019269] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 749.019419] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 749.019568] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 749.019775] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 749.019931] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 749.020109] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 
tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 749.020273] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 749.020443] env[69475]: DEBUG nova.virt.hardware [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 749.021629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d029799a-dc86-4406-a953-928eb2e4a1e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.030528] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2496bf6-882e-4352-b091-c0e3cf1dcfa9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.100673] env[69475]: DEBUG nova.scheduler.client.report [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 749.334567] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507927, 'name': CreateVM_Task, 'duration_secs': 0.324493} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.334746] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.335491] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.335630] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.335932] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.336563] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4917162f-1750-4f02-9ce9-25ec35656c21 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.341138] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 749.341138] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522b4560-2e3a-b59e-ad83-b789ef717ef4" [ 749.341138] env[69475]: _type = "Task" [ 749.341138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.349332] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522b4560-2e3a-b59e-ad83-b789ef717ef4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.606793] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.607385] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.610325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.987s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.610325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.612271] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.646s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.612406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.614074] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.355s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.614256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.616133] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.978s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.617545] env[69475]: INFO nova.compute.claims [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.657129] env[69475]: INFO nova.scheduler.client.report [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 
tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Deleted allocations for instance b255f4d7-b177-4d6c-8a28-dcb5a179c1c0 [ 749.657129] env[69475]: INFO nova.scheduler.client.report [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Deleted allocations for instance 4465f156-09cc-4eba-90e4-be76f3010363 [ 749.675317] env[69475]: DEBUG nova.network.neutron [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Updated VIF entry in instance network info cache for port f46198f7-e2cd-4d21-8b63-33c585b37c57. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 749.675514] env[69475]: DEBUG nova.network.neutron [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Updating instance_info_cache with network_info: [{"id": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "address": "fa:16:3e:db:21:da", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.144", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf46198f7-e2", "ovs_interfaceid": "f46198f7-e2cd-4d21-8b63-33c585b37c57", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.679182] env[69475]: INFO nova.scheduler.client.report [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Deleted allocations for instance e48e2cc1-7d60-457f-8f1c-649f0dda8cdb [ 749.832442] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Successfully updated port: fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.854355] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522b4560-2e3a-b59e-ad83-b789ef717ef4, 'name': SearchDatastore_Task, 'duration_secs': 0.010023} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.855269] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.855519] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 749.855751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.855895] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.856088] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 749.856600] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2920b0b-a426-43e0-9d38-2a8885c8664d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.866508] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 749.866508] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 749.866660] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-344c1a9d-fa1c-4017-854b-f676113a769a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.872806] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 749.872806] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528aa45d-8d6b-5c48-308a-39e16ad6eed3" [ 749.872806] env[69475]: _type = "Task" [ 749.872806] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.880758] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528aa45d-8d6b-5c48-308a-39e16ad6eed3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.122120] env[69475]: DEBUG nova.compute.utils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 750.123351] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 750.123510] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 750.172330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-538a0899-32a5-4a2c-872b-725b9b4330d2 tempest-ServersTestManualDisk-1552660528 tempest-ServersTestManualDisk-1552660528-project-member] Lock "b255f4d7-b177-4d6c-8a28-dcb5a179c1c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.327s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.173256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-15e26f90-c824-4012-83e0-4cb19ebff559 tempest-ServersAdminNegativeTestJSON-442332739 tempest-ServersAdminNegativeTestJSON-442332739-project-member] Lock "4465f156-09cc-4eba-90e4-be76f3010363" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.578s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.178655] env[69475]: DEBUG oslo_concurrency.lockutils [req-6998ced1-6421-4543-b014-2c0eb6ff0c30 req-44447e8e-3368-4159-ac4b-c9b34279235b service nova] Releasing lock "refresh_cache-b71882d4-537d-4a90-b43d-f8ac4ca0d90c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.188673] env[69475]: DEBUG oslo_concurrency.lockutils [None req-09552879-66fb-456c-8561-acd40b698676 tempest-AttachInterfacesV270Test-1312908406 tempest-AttachInterfacesV270Test-1312908406-project-member] Lock "e48e2cc1-7d60-457f-8f1c-649f0dda8cdb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 50.540s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.192088] env[69475]: DEBUG nova.policy [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9b92fe261b0493fa6d31bb1f23024a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed51355b6daf4d4689f27ee4b8208618', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 750.335631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.335781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock 
"refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.335918] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.384809] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528aa45d-8d6b-5c48-308a-39e16ad6eed3, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.384809] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bd635f3-fbb0-4519-87ac-b3b19dc0102d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.390449] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 750.390449] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520627d2-8cce-2b64-8d6b-cee0e6739ab7" [ 750.390449] env[69475]: _type = "Task" [ 750.390449] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.399308] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520627d2-8cce-2b64-8d6b-cee0e6739ab7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.601507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.601676] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.601891] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.602165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 750.602305] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 750.605530] env[69475]: INFO nova.compute.manager [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Terminating instance [ 750.627547] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.875785] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.909524] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520627d2-8cce-2b64-8d6b-cee0e6739ab7, 'name': SearchDatastore_Task, 'duration_secs': 0.013361} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.910285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.910285] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b71882d4-537d-4a90-b43d-f8ac4ca0d90c/b71882d4-537d-4a90-b43d-f8ac4ca0d90c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.912885] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cc22bd6-30a0-4b8c-87c2-bde9046c73b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.926775] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 750.926775] env[69475]: value = "task-3507929" [ 750.926775] env[69475]: _type = "Task" [ 750.926775] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.945877] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.068192] env[69475]: DEBUG nova.network.neutron [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.112535] env[69475]: DEBUG nova.compute.manager [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 751.113507] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 751.113863] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07be17b8-d744-4ed9-a61e-47d151c29016 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.129022] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 751.129022] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09aee9fc-f233-4cb1-b59e-89d7440778f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.142493] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 751.142493] env[69475]: value = "task-3507930" [ 751.142493] env[69475]: _type = "Task" [ 751.142493] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.162278] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507930, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.162622] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Successfully created port: 3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.228204] env[69475]: DEBUG nova.compute.manager [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Received event network-vif-plugged-fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.228421] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 751.228628] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.228788] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.228949] env[69475]: DEBUG nova.compute.manager [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] No waiting events found dispatching network-vif-plugged-fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 751.230020] env[69475]: WARNING nova.compute.manager [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Received unexpected event network-vif-plugged-fd636137-6583-4c7a-937a-701561e4141a for instance with vm_state building and task_state spawning. 
[ 751.231026] env[69475]: DEBUG nova.compute.manager [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Received event network-changed-fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.231026] env[69475]: DEBUG nova.compute.manager [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Refreshing instance network info cache due to event network-changed-fd636137-6583-4c7a-937a-701561e4141a. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.231026] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Acquiring lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.310905] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5059ccc-4eb8-4240-a3dd-af438bc1915a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.321072] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c10a564-49d0-4465-9891-67ec5912042c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.364043] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc64ebaf-69f7-4823-b8a9-2bd5c57cb697 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.376332] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f38237-5324-44e1-aba2-152c2fca8af4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.391813] env[69475]: DEBUG nova.compute.provider_tree [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.446924] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.488728} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.446924] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] b71882d4-537d-4a90-b43d-f8ac4ca0d90c/b71882d4-537d-4a90-b43d-f8ac4ca0d90c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.446924] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.446924] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a54cb658-ba2b-4623-9577-f2f766db472b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.452997] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 751.452997] env[69475]: value = "task-3507931" [ 751.452997] env[69475]: _type = "Task" [ 751.452997] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.463175] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507931, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.571071] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.571445] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Instance network_info: |[{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.571782] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Acquired lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.571963] env[69475]: DEBUG nova.network.neutron [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Refreshing network info cache for port fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.573220] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:54:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd636137-6583-4c7a-937a-701561e4141a', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.580914] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Creating folder: Project 
(dde7ecd407ae48f6a5d1b791df065d6e). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.581426] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f125ef6-cee4-45b5-9f2d-0025fe8a573d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.594895] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Created folder: Project (dde7ecd407ae48f6a5d1b791df065d6e) in parent group-v700823. [ 751.595201] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Creating folder: Instances. Parent ref: group-v700952. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.595467] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94729278-0eea-46fb-ae26-8b16cf6fd16d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.606751] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Created folder: Instances in parent group-v700952. [ 751.607032] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.607319] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.607574] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1748782a-d233-4356-8da3-6ae4df048767 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.628705] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.628705] env[69475]: value = "task-3507934" [ 751.628705] env[69475]: _type = "Task" [ 751.628705] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.636623] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507934, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.637695] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.652632] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507930, 'name': PowerOffVM_Task, 'duration_secs': 0.306351} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.652851] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 751.653049] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 751.653345] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-023d7a3d-6747-48a7-b5a6-162bbbb4ef15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.671764] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 751.671764] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 751.671900] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 751.672013] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 751.672151] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac 
tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 751.672407] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 751.672541] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 751.672734] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 751.672865] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 751.673035] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 751.673380] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 751.674088] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a151d889-6cdc-47a0-920b-a2d9e2849998 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.684344] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7500f558-4b19-4c4f-9196-00189b0515fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.734049] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 751.734291] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Deleting contents 
of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 751.734480] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Deleting the datastore file [datastore2] 8fbabf86-be9e-47ec-8c4c-adea4c68abe8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 751.734743] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-28967fb8-eb08-4e97-9553-0d3fede4d828 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.743543] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for the task: (returnval){ [ 751.743543] env[69475]: value = "task-3507936" [ 751.743543] env[69475]: _type = "Task" [ 751.743543] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.755387] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507936, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.895553] env[69475]: DEBUG nova.scheduler.client.report [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.964621] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507931, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06919} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.964954] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.965913] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec9491b-d1b9-44f3-bc84-a0e2fde62e19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.010152] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Reconfiguring VM instance instance-00000027 to attach disk [datastore2] b71882d4-537d-4a90-b43d-f8ac4ca0d90c/b71882d4-537d-4a90-b43d-f8ac4ca0d90c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 752.010822] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c768c96e-9937-41df-a2cd-7de7c024c331 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.044341] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 752.044341] env[69475]: value = "task-3507937" [ 752.044341] env[69475]: _type = "Task" [ 752.044341] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.054820] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507937, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.141811] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507934, 'name': CreateVM_Task, 'duration_secs': 0.320301} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.145781] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 752.146691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.147036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.147466] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 752.150218] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d00d76b8-d25b-45f8-a5e8-493062e341b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.154974] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 752.154974] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d08fa-f274-bdf7-b485-3d6c449521aa" [ 752.154974] env[69475]: _type = "Task" [ 752.154974] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.165637] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d08fa-f274-bdf7-b485-3d6c449521aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.257132] env[69475]: DEBUG oslo_vmware.api [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Task: {'id': task-3507936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160738} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.257132] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 752.257132] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 752.257132] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 752.257132] env[69475]: INFO nova.compute.manager [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Took 1.14 seconds to destroy the instance on the hypervisor. [ 752.257132] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 752.257132] env[69475]: DEBUG nova.compute.manager [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 752.257132] env[69475]: DEBUG nova.network.neutron [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 752.403205] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.787s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.404039] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 752.409864] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.863s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 752.409864] env[69475]: INFO nova.compute.claims [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 752.556632] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507937, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.632278] env[69475]: DEBUG nova.network.neutron [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updated VIF entry in instance network info cache for port fd636137-6583-4c7a-937a-701561e4141a. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.632577] env[69475]: DEBUG nova.network.neutron [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.668637] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d08fa-f274-bdf7-b485-3d6c449521aa, 'name': SearchDatastore_Task, 'duration_secs': 0.015084} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.669047] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.669473] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.669782] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.669988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.670345] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.670577] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1d6fd4e3-1bda-4980-8c65-d0d2ae8c13e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.682865] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.685756] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.685756] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-88315a71-de8e-4732-b9b5-a25feb5312c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.694331] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 752.694331] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52262b7f-d67b-92de-4118-125c527b5d60" [ 752.694331] env[69475]: _type = "Task" [ 752.694331] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.709470] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52262b7f-d67b-92de-4118-125c527b5d60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.920824] env[69475]: DEBUG nova.compute.utils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.923896] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.927026] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.999933] env[69475]: DEBUG nova.policy [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9b92fe261b0493fa6d31bb1f23024a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed51355b6daf4d4689f27ee4b8208618', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 753.060296] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507937, 'name': ReconfigVM_Task, 'duration_secs': 0.723278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.060296] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Reconfigured VM instance instance-00000027 to attach disk [datastore2] b71882d4-537d-4a90-b43d-f8ac4ca0d90c/b71882d4-537d-4a90-b43d-f8ac4ca0d90c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 753.060296] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b2318780-e00f-4424-a8c4-ea626ab65dc3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.068158] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 753.068158] env[69475]: value = "task-3507939" [ 753.068158] env[69475]: _type = "Task" [ 753.068158] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.078398] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507939, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.135602] env[69475]: DEBUG oslo_concurrency.lockutils [req-7ddafb63-5c97-42c8-8d8c-6da899af7efd req-01846676-c681-4962-aad9-0312ad4d62f0 service nova] Releasing lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.208988] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52262b7f-d67b-92de-4118-125c527b5d60, 'name': SearchDatastore_Task, 'duration_secs': 0.013823} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.210026] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-286238b3-10f2-4293-a598-6f218185cbcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.217670] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 753.217670] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524fdcc4-806a-cc1c-7d6a-4d446cdce4e1" [ 753.217670] env[69475]: _type = "Task" [ 753.217670] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.231436] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524fdcc4-806a-cc1c-7d6a-4d446cdce4e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.428351] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 753.582935] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507939, 'name': Rename_Task, 'duration_secs': 0.154811} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.582935] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 753.582935] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b97f08c2-4744-4ff5-a297-313ad49ce897 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.594488] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 753.594488] env[69475]: value = "task-3507940" [ 753.594488] env[69475]: _type = "Task" [ 753.594488] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.607144] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507940, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.738166] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524fdcc4-806a-cc1c-7d6a-4d446cdce4e1, 'name': SearchDatastore_Task, 'duration_secs': 0.011083} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.738166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.738166] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.738166] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9d9fd61-d48f-436d-927d-55d1b19bcb86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.740578] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Successfully created port: 792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 753.747547] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 753.747547] env[69475]: value = "task-3507941" [ 753.747547] env[69475]: _type = "Task" [ 753.747547] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.756291] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507941, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.839199] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Successfully updated port: 3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.882850] env[69475]: DEBUG nova.compute.manager [req-36ae2941-7510-4264-8b7a-67723ad16547 req-524e1008-aed8-4eaa-8644-bd10ee100ba1 service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Received event network-vif-deleted-a4b5cd8c-fa71-4d57-ba30-262f752f04db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 753.883597] env[69475]: INFO nova.compute.manager [req-36ae2941-7510-4264-8b7a-67723ad16547 req-524e1008-aed8-4eaa-8644-bd10ee100ba1 service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Neutron deleted interface a4b5cd8c-fa71-4d57-ba30-262f752f04db; detaching it from the instance and deleting it from the info cache [ 753.883597] env[69475]: DEBUG nova.network.neutron [req-36ae2941-7510-4264-8b7a-67723ad16547 req-524e1008-aed8-4eaa-8644-bd10ee100ba1 service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.066381] env[69475]: DEBUG nova.network.neutron [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 754.110717] env[69475]: DEBUG oslo_vmware.api [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507940, 'name': PowerOnVM_Task, 'duration_secs': 0.500211} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.111050] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 754.111283] env[69475]: INFO nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Took 7.81 seconds to spawn the instance on the hypervisor. 
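The repeated "Task: {...} progress is N%" and "completed successfully" entries in this section are produced by oslo.vmware's task-polling loop (wait_for_task/_poll_task): Nova starts a server-side vCenter task and then blocks while the loop polls it to a terminal state. A minimal illustrative sketch of that pattern follows; the endpoint, credentials, datastore paths and the specific CopyVirtualDisk arguments are placeholders, not values taken from this log or from Nova's own code.

from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials; a real deployment reads these from nova.conf.
session = vmware_api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Start a server-side task (here a virtual-disk copy, as in task-3507941 above)...
task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    sourceName='[datastore2] devstack-image-cache_base/example.vmdk',
    destName='[datastore2] example-instance/example-instance.vmdk')

# ...then block while oslo.vmware polls it every task_poll_interval seconds,
# logging "progress is N%" at DEBUG and raising if the task ends in an error state.
task_info = session.wait_for_task(task)
print(task_info.state)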
[ 754.111526] env[69475]: DEBUG nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 754.112617] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5bfb78-34e3-4bd7-b520-6478020e609a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.117281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc90194-309f-465c-a8c2-31c5f5fbfb24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.131317] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39858621-da3e-482d-a781-73a418bda758 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.176020] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d713b2b1-ab05-45ce-a6bc-74fab251ecd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.187154] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3372ced6-f2f1-4fd5-bc52-22b9679e6c62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.203796] env[69475]: DEBUG nova.compute.provider_tree [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.259630] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507941, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.341114] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.341530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.341802] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.390437] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0536f2d-e4fb-42f6-9fd6-09abf71f87b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.401559] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c212d28c-fb7b-4b8a-b938-55d7da12cd27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.435662] env[69475]: DEBUG nova.compute.manager [req-36ae2941-7510-4264-8b7a-67723ad16547 req-524e1008-aed8-4eaa-8644-bd10ee100ba1 service nova] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Detach interface failed, port_id=a4b5cd8c-fa71-4d57-ba30-262f752f04db, reason: Instance 8fbabf86-be9e-47ec-8c4c-adea4c68abe8 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 754.443173] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 754.480060] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 754.480329] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 754.480486] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 754.480665] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 754.480809] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 754.480953] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 754.481176] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 754.481361] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 754.481543] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 754.481735] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 754.481917] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 754.482834] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e0c750-0e34-4ade-85e6-a76016bf21c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.494335] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2af09a7-1e55-48df-9ecf-6d30960f2bee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.569389] env[69475]: INFO nova.compute.manager [-] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Took 2.31 seconds to deallocate network for instance. [ 754.637420] env[69475]: INFO nova.compute.manager [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Took 58.46 seconds to build instance. [ 754.708266] env[69475]: DEBUG nova.scheduler.client.report [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 754.761879] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507941, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558204} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.762182] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.762494] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.762890] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbca156e-66e5-4432-ac2c-59c572014302 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.771520] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 754.771520] env[69475]: value = "task-3507942" [ 754.771520] env[69475]: _type = "Task" [ 754.771520] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.780658] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507942, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.882829] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.043873] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Updating instance_info_cache with network_info: [{"id": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "address": "fa:16:3e:ba:86:f5", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b284ec5-f1", "ovs_interfaceid": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.077434] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.141911] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ef471ee-7d19-48f1-a311-76ca7e9064b3 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.498s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.213513] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.806s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.214317] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 755.217511] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 45.453s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.217716] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.220231] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.089s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.220799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.222559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.694s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.224431] env[69475]: INFO nova.compute.claims [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 755.258619] env[69475]: INFO nova.scheduler.client.report [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Deleted allocations for instance 6f530b86-2ed1-41db-929c-8a5dd61d931a [ 755.264880] env[69475]: INFO nova.scheduler.client.report [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted allocations for instance 9e2d4d61-71ed-447a-b28e-c29c5bd8d763 [ 755.282813] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507942, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070542} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.285731] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 755.285731] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f912e32-dac0-4909-a4aa-2ddc38d55e0d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.312301] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 755.313090] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f5e20f0-cfb0-49ca-bc9d-82bcfa2eff43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.341026] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 755.341026] env[69475]: value = "task-3507943" [ 755.341026] env[69475]: _type = "Task" [ 755.341026] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.348624] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507943, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.548333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.548715] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Instance network_info: |[{"id": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "address": "fa:16:3e:ba:86:f5", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b284ec5-f1", "ovs_interfaceid": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.549196] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:86:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ada35c98-01a9-4352-98e4-1d20ba31f928', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3b284ec5-f19b-4688-9bab-a6fb120cc7d8', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.558102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Creating folder: Project (ed51355b6daf4d4689f27ee4b8208618). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.559180] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caf4d131-985f-4169-b4d6-b6b899f100e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.574833] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Created folder: Project (ed51355b6daf4d4689f27ee4b8208618) in parent group-v700823. [ 755.575290] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Creating folder: Instances. Parent ref: group-v700955. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.575813] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-460718a8-9b52-4b88-be27-56928acf1e09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.583761] env[69475]: DEBUG nova.compute.manager [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Received event network-vif-plugged-792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 755.584671] env[69475]: DEBUG oslo_concurrency.lockutils [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] Acquiring lock "712e93b6-e797-4b9f-b39b-33373cede403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.585450] env[69475]: DEBUG oslo_concurrency.lockutils [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] Lock "712e93b6-e797-4b9f-b39b-33373cede403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.586056] env[69475]: DEBUG oslo_concurrency.lockutils [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] Lock "712e93b6-e797-4b9f-b39b-33373cede403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.586352] env[69475]: DEBUG nova.compute.manager [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] No waiting events found dispatching network-vif-plugged-792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 755.586701] env[69475]: WARNING nova.compute.manager [req-b240d2f6-adc8-4050-a2b6-c0093bf9dfd0 req-d356f8f3-cd55-40a1-aa5e-144b19bc5d4b service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Received unexpected event network-vif-plugged-792cf213-fbce-47e9-8e02-2c4aa6a06738 for instance with vm_state building and task_state spawning. 
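The "Acquiring lock ... / Lock ... acquired ... waited Ns / ... released ... held Ns" entries throughout this section are emitted by oslo.concurrency's lock helpers, which Nova uses to serialise work such as ResourceTracker.instance_claim (the "compute_resources" lock) and per-instance event dispatch (the "<uuid>-events" lock above). A minimal sketch of that usage, assuming a hypothetical function name; the UUID is simply the instance being claimed at the top of this section.

from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def claim_resources(instance_uuid):
    # Runs with the named in-process lock held; the decorator's wrapper logs how
    # long it waited for the lock and how long it held it, which is what shows up
    # as the DEBUG oslo_concurrency.lockutils messages in this log.
    print('claiming resources for %s' % instance_uuid)

claim_resources('daef2117-0d9f-4c9e-99e7-1e8a65aa1f22')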
[ 755.588688] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Created folder: Instances in parent group-v700955. [ 755.589026] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.589279] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.589870] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-622e3da5-143a-43e2-aa08-616a98c38f3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.614505] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.614505] env[69475]: value = "task-3507946" [ 755.614505] env[69475]: _type = "Task" [ 755.614505] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.625537] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507946, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.645080] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.731046] env[69475]: DEBUG nova.compute.utils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 755.732765] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 755.732947] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 755.769964] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Successfully updated port: 792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 755.779018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ffe663e3-a8a6-4b75-a1b4-f8add7e47fbd tempest-InstanceActionsNegativeTestJSON-708038306 tempest-InstanceActionsNegativeTestJSON-708038306-project-member] Lock "6f530b86-2ed1-41db-929c-8a5dd61d931a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.421s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.781435] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e28ae679-55cc-42ad-87cf-5fc76f371093 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "9e2d4d61-71ed-447a-b28e-c29c5bd8d763" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.809s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.833323] env[69475]: DEBUG nova.policy [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd9b92fe261b0493fa6d31bb1f23024a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed51355b6daf4d4689f27ee4b8208618', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.851905] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507943, 'name': ReconfigVM_Task, 'duration_secs': 0.283129} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.852868] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.853101] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-39882853-0c75-44e8-9a6a-49a915c598b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.861126] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 755.861126] env[69475]: value = "task-3507947" [ 755.861126] env[69475]: _type = "Task" [ 755.861126] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.871964] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507947, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.021269] env[69475]: DEBUG nova.compute.manager [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.022314] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcbc5d4-1ddf-48d4-92f2-09bb93a982cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.126740] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507946, 'name': CreateVM_Task, 'duration_secs': 0.354619} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.126925] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.127777] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.127946] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.128287] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.128830] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-facad19b-26d7-4594-a2be-38638866d2a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.134486] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 756.134486] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c03fda-0104-0042-76b5-67d1f8cfe682" [ 756.134486] env[69475]: _type = "Task" [ 756.134486] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.144275] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c03fda-0104-0042-76b5-67d1f8cfe682, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.172657] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.198693] env[69475]: DEBUG nova.compute.manager [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Received event network-vif-plugged-3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.198911] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Acquiring lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 756.199139] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 756.199392] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 756.199577] env[69475]: DEBUG nova.compute.manager [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] No waiting events found dispatching network-vif-plugged-3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 756.199738] env[69475]: WARNING nova.compute.manager [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Received unexpected event network-vif-plugged-3b284ec5-f19b-4688-9bab-a6fb120cc7d8 for instance with vm_state building and task_state spawning. [ 756.199892] env[69475]: DEBUG nova.compute.manager [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Received event network-changed-3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.200142] env[69475]: DEBUG nova.compute.manager [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Refreshing instance network info cache due to event network-changed-3b284ec5-f19b-4688-9bab-a6fb120cc7d8. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 756.200363] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Acquiring lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.200502] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Acquired lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.200655] env[69475]: DEBUG nova.network.neutron [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Refreshing network info cache for port 3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.236701] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 756.281175] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.281355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.282165] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 756.380636] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507947, 'name': Rename_Task, 'duration_secs': 0.154306} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.380926] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 756.382299] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77d67622-ac56-4fd7-9a5f-3dae2071a1e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.394359] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 756.394359] env[69475]: value = "task-3507948" [ 756.394359] env[69475]: _type = "Task" [ 756.394359] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.408752] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507948, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.538466] env[69475]: INFO nova.compute.manager [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] instance snapshotting [ 756.546736] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5bb283-6ec9-4e18-839c-4193acc40f00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.574163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877fbc59-c506-4d2e-a588-4b7a076c95ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.578912] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Successfully created port: 74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.655349] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c03fda-0104-0042-76b5-67d1f8cfe682, 'name': SearchDatastore_Task, 'duration_secs': 0.035803} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.656022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.656345] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 756.657567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.657567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.661191] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 756.661860] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83ade19f-745e-4ff9-8d5d-dfccf47b44ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.679356] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 756.679571] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 756.680519] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2142fdbf-1474-4c81-afef-2acd64668a81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.689186] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 756.689186] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524abe84-76f7-f8d0-5019-1bb74ff22632" [ 756.689186] env[69475]: _type = "Task" [ 756.689186] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.702324] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524abe84-76f7-f8d0-5019-1bb74ff22632, 'name': SearchDatastore_Task, 'duration_secs': 0.010875} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.705874] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54f4ac83-36f1-469f-86e1-fac8647225d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.717497] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 756.717497] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e708c1-5bd8-0b96-b387-2c7a4bc4d7a5" [ 756.717497] env[69475]: _type = "Task" [ 756.717497] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.734658] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e708c1-5bd8-0b96-b387-2c7a4bc4d7a5, 'name': SearchDatastore_Task, 'duration_secs': 0.014101} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.734949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 756.735407] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 3e332e28-5db5-4f04-8a47-95406da16e21/3e332e28-5db5-4f04-8a47-95406da16e21.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 756.735774] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bfd18681-2acb-4972-9a5b-64400f330f5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.750666] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 756.750666] env[69475]: value = "task-3507949" [ 756.750666] env[69475]: _type = "Task" [ 756.750666] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.762320] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507949, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.862221] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9438942-d60a-4ad0-9fb6-9ffb71656a69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.870913] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b962a8c0-53bb-400f-b1bf-de03be7e35db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.915058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-489d29ab-9a88-4a53-8164-ea2fd829d4e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.922600] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 756.932524] env[69475]: DEBUG oslo_vmware.api [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3507948, 'name': PowerOnVM_Task, 'duration_secs': 0.482996} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.933051] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 756.933257] env[69475]: INFO nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Took 7.94 seconds to spawn the instance on the hypervisor. [ 756.933828] env[69475]: DEBUG nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 756.935477] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd2768e4-8108-484f-8f08-9a2b4e5a554d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.939801] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0f1add-4700-49fe-840e-d0a01e608424 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.959767] env[69475]: DEBUG nova.compute.provider_tree [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 757.096169] env[69475]: DEBUG nova.network.neutron [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Updated VIF entry in instance network info cache for port 3b284ec5-f19b-4688-9bab-a6fb120cc7d8. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 757.096641] env[69475]: DEBUG nova.network.neutron [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Updating instance_info_cache with network_info: [{"id": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "address": "fa:16:3e:ba:86:f5", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3b284ec5-f1", "ovs_interfaceid": "3b284ec5-f19b-4688-9bab-a6fb120cc7d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.100325] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 757.100325] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-57037bf8-1c15-4de6-b122-c0a8bec10f7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.111764] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 757.111764] env[69475]: value = "task-3507950" [ 757.111764] env[69475]: _type = "Task" [ 757.111764] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.125510] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507950, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.188700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "8cc0636c-84af-4f68-bec8-1493b421a605" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 757.190106] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.249639] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 757.265332] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507949, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.285493] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.285620] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 757.285756] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 
757.285932] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 757.286300] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 757.286496] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 757.286718] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 757.286931] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 757.287052] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 757.287303] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 757.287512] env[69475]: DEBUG nova.virt.hardware [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 757.288297] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9dee571-6e5b-4b75-bbb1-0062f04da364 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.299697] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24670232-ad92-4814-a42d-24db6b374334 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.464488] env[69475]: DEBUG nova.scheduler.client.report [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 
tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 757.481423] env[69475]: INFO nova.compute.manager [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Took 58.05 seconds to build instance. [ 757.497681] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Updating instance_info_cache with network_info: [{"id": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "address": "fa:16:3e:5b:58:fd", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792cf213-fb", "ovs_interfaceid": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.600916] env[69475]: DEBUG oslo_concurrency.lockutils [req-4dadad3d-c9d2-4d0d-b088-d403fd096e46 req-03e32e85-b730-4bd9-86d9-e78087b597e2 service nova] Releasing lock "refresh_cache-3e332e28-5db5-4f04-8a47-95406da16e21" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.622160] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507950, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.766626] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507949, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706101} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.766626] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 3e332e28-5db5-4f04-8a47-95406da16e21/3e332e28-5db5-4f04-8a47-95406da16e21.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 757.766626] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 757.770033] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f50a7898-c045-4dce-b227-43ebd400026d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.775325] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 757.775325] env[69475]: value = "task-3507951" [ 757.775325] env[69475]: _type = "Task" [ 757.775325] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.786175] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507951, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.978391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.756s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.979735] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 757.984768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 36.436s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.985454] env[69475]: DEBUG nova.objects.instance [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 757.990262] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e8fd25-b759-423a-8795-ad8a23eeeae4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.217s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.001546] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.001546] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Instance network_info: |[{"id": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "address": "fa:16:3e:5b:58:fd", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792cf213-fb", "ovs_interfaceid": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.001546] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 
tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:58:fd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ada35c98-01a9-4352-98e4-1d20ba31f928', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '792cf213-fbce-47e9-8e02-2c4aa6a06738', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.010753] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 758.010753] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.011092] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9347336-c762-4b0e-abd1-e0ea1e982b65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.037323] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.037323] env[69475]: value = "task-3507952" [ 758.037323] env[69475]: _type = "Task" [ 758.037323] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.052845] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507952, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.124949] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507950, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.254500] env[69475]: DEBUG nova.compute.manager [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Received event network-changed-792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.254500] env[69475]: DEBUG nova.compute.manager [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Refreshing instance network info cache due to event network-changed-792cf213-fbce-47e9-8e02-2c4aa6a06738. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 758.254500] env[69475]: DEBUG oslo_concurrency.lockutils [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] Acquiring lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.254500] env[69475]: DEBUG oslo_concurrency.lockutils [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] Acquired lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.254500] env[69475]: DEBUG nova.network.neutron [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Refreshing network info cache for port 792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 758.293168] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214901} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.293558] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.294973] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4f9385-41c3-4209-a272-ebdb5d148732 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.325091] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 3e332e28-5db5-4f04-8a47-95406da16e21/3e332e28-5db5-4f04-8a47-95406da16e21.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.326140] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c23abc6-aa82-4043-9f3a-f1e0e6e69b8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.351978] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 758.351978] env[69475]: value = "task-3507953" [ 758.351978] env[69475]: _type = "Task" [ 758.351978] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.362344] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507953, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.487259] env[69475]: DEBUG nova.compute.utils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 758.488860] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 758.489091] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 758.496200] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 758.551689] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507952, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.588296] env[69475]: DEBUG nova.policy [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb93c2f0a3554be8b25cde370a4083ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de2b24bdabce45a7884bdce4ed781c79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 758.625127] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507950, 'name': CreateSnapshot_Task, 'duration_secs': 1.446095} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.625187] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 758.625995] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cc0c48-6861-429d-bad7-9328dce26cfb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.867953] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507953, 'name': ReconfigVM_Task, 'duration_secs': 0.315012} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.873933] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 3e332e28-5db5-4f04-8a47-95406da16e21/3e332e28-5db5-4f04-8a47-95406da16e21.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 758.874206] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-99164972-3a1f-4ecb-90bf-ab9ae96dcf79 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.883854] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 758.883854] env[69475]: value = "task-3507954" [ 758.883854] env[69475]: _type = "Task" [ 758.883854] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.896093] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507954, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.000740] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 759.006033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f20f7b7e-c248-4a5c-b520-59a3dd052589 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.009828] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.867s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.012599] env[69475]: INFO nova.compute.claims [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.033020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.054958] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507952, 'name': CreateVM_Task, 'duration_secs': 0.608525} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.055167] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.055924] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.056106] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.056412] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 759.056671] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-085b0df3-04c6-4c21-8852-32ef157fcfff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.066168] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 759.066168] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52336245-b8e6-abea-f459-036be62d7425" [ 759.066168] env[69475]: _type = "Task" [ 759.066168] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.073260] env[69475]: DEBUG nova.network.neutron [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Updated VIF entry in instance network info cache for port 792cf213-fbce-47e9-8e02-2c4aa6a06738. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 759.078020] env[69475]: DEBUG nova.network.neutron [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Updating instance_info_cache with network_info: [{"id": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "address": "fa:16:3e:5b:58:fd", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap792cf213-fb", "ovs_interfaceid": "792cf213-fbce-47e9-8e02-2c4aa6a06738", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.084581] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52336245-b8e6-abea-f459-036be62d7425, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.154514] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 759.154851] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2a225a5b-1022-45df-8ba4-9e16a62ceaab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.171197] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 759.171197] env[69475]: value = "task-3507955" [ 759.171197] env[69475]: _type = "Task" [ 759.171197] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.190089] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507955, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.249574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "86647493-8b2c-46bd-94d3-c973e843f778" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.249827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.396985] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507954, 'name': Rename_Task, 'duration_secs': 0.157272} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.397493] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 759.397844] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-825c62cf-e327-4ac1-a888-142d4f262452 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.408969] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 759.408969] env[69475]: value = "task-3507956" [ 759.408969] env[69475]: _type = "Task" [ 759.408969] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.419187] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507956, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.577900] env[69475]: DEBUG oslo_concurrency.lockutils [req-314fb13a-8bd1-4cef-bd67-656de8089127 req-857717c1-1a75-4a37-b839-bc15913a36a6 service nova] Releasing lock "refresh_cache-712e93b6-e797-4b9f-b39b-33373cede403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.578463] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52336245-b8e6-abea-f459-036be62d7425, 'name': SearchDatastore_Task, 'duration_secs': 0.023555} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.579340] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.579340] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.580119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.580119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.580119] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 759.580350] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a089c864-90c7-4822-826f-f2288700211b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.590967] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Created directory with path [datastore1] devstack-image-cache_base 
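
The sequence above (acquire a lock named after the cached image path, search the datastore for it, create the cache directory if needed, release the lock) is the usual guard against two concurrent spawns fetching the same image. An illustrative sketch of that pattern with oslo.concurrency follows; ensure_cached_image, exists and fetch are placeholder names standing in for the SearchDatastore_Task / MakeDirectory / copy steps seen in the log, not Nova functions.

# Illustrative only: serialize work on one image-cache entry within the process.
from oslo_concurrency import lockutils

def ensure_cached_image(cache_path, exists, fetch):
    # cache_path: e.g. '[datastore1] devstack-image-cache_base/<image-id>'
    # Concurrent requests for the same image block here instead of fetching twice.
    with lockutils.lock(cache_path):
        if not exists(cache_path):
            fetch(cache_path)
    return cache_path
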
{{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 759.591176] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 759.591985] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d69f2e01-9496-4690-acf3-c45180ca917b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.598445] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 759.598445] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234811b-ac20-42fa-e56f-c590a6026401" [ 759.598445] env[69475]: _type = "Task" [ 759.598445] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.613054] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5234811b-ac20-42fa-e56f-c590a6026401, 'name': SearchDatastore_Task} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.614200] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22633f5-33ff-41e5-9594-8910700a34f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.620498] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 759.620498] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259c648-5745-329a-b4ac-7608454cdff4" [ 759.620498] env[69475]: _type = "Task" [ 759.620498] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.631164] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259c648-5745-329a-b4ac-7608454cdff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.682780] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507955, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.780294] env[69475]: DEBUG nova.compute.manager [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 759.784038] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Successfully created port: 4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 759.809010] env[69475]: DEBUG nova.compute.manager [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Received event network-vif-plugged-74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 759.810477] env[69475]: DEBUG oslo_concurrency.lockutils [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] Acquiring lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.810477] env[69475]: DEBUG oslo_concurrency.lockutils [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.810477] env[69475]: DEBUG oslo_concurrency.lockutils [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.810477] env[69475]: DEBUG nova.compute.manager [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] No waiting events found dispatching network-vif-plugged-74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 759.810477] env[69475]: WARNING nova.compute.manager [req-1bcbc7fe-fc7b-428b-8a08-8cfea8c9fba0 req-e0d3c9b2-4a92-40f0-b519-b1669f44dd3f service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Received unexpected event network-vif-plugged-74bc91c0-20e1-4de1-8433-333a88443441 for instance with vm_state building and task_state spawning. [ 759.919856] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507956, 'name': PowerOnVM_Task} progress is 100%. 
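
The WARNING just above ("Received unexpected event network-vif-plugged-...") is benign: the compute manager dispatches Neutron events to whichever code path registered a waiter for them, and an event that arrives while nothing is registered is logged and dropped. A conceptual illustration with plain threading follows; this is not Nova's event plumbing, only the register/dispatch idea behind the "No waiting events found" message.

# Conceptual illustration only (plain threading, not Nova code).
import threading

waiters = {}   # (instance_uuid, event_name) -> threading.Event

def expect(instance_uuid, event_name):
    # Called before triggering the action that will produce the event.
    ev = threading.Event()
    waiters[(instance_uuid, event_name)] = ev
    return ev

def dispatch(instance_uuid, event_name):
    # Called when the external event arrives from Neutron.
    ev = waiters.pop((instance_uuid, event_name), None)
    if ev is None:
        print('WARNING: unexpected event', event_name, 'for', instance_uuid)
        return
    ev.set()   # wakes whoever called expect(...).wait()
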
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.957018] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Successfully updated port: 74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 760.016788] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 760.060987] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 760.061498] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 760.061789] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 760.062124] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 760.062390] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 760.062641] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 760.062980] 
env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 760.063269] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 760.064239] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 760.064340] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 760.065046] env[69475]: DEBUG nova.virt.hardware [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 760.065462] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc72e829-972a-438c-b3b3-be6205ccbcfd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.078532] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92a5ab8c-127c-49be-a930-bc0fac59c578 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.136054] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259c648-5745-329a-b4ac-7608454cdff4, 'name': SearchDatastore_Task, 'duration_secs': 0.011532} completed successfully. 
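
The topology lines above boil down to a small search: with no explicit limits or preferences on the m1.nano flavor or the image, every (sockets, cores, threads) split whose product equals the vCPU count is a candidate, and with one vCPU the only candidate is 1:1:1. A simplified sketch of that enumeration follows; it illustrates the idea, not nova.virt.hardware's actual implementation.

# Simplified illustration of the topology enumeration logged above.
import itertools

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    for sockets, cores, threads in itertools.product(
            range(1, min(vcpus, max_sockets) + 1),
            range(1, min(vcpus, max_cores) + 1),
            range(1, min(vcpus, max_threads) + 1)):
        if sockets * cores * threads == vcpus:
            yield (sockets, cores, threads)

print(list(possible_topologies(1)))   # [(1, 1, 1)] -- the single topology in the log
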
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.136706] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.136706] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 712e93b6-e797-4b9f-b39b-33373cede403/712e93b6-e797-4b9f-b39b-33373cede403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 760.136902] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-727a0d80-19ee-421b-a233-916ca558b7ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.144628] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 760.144628] env[69475]: value = "task-3507957" [ 760.144628] env[69475]: _type = "Task" [ 760.144628] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.156676] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507957, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.188891] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507955, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.309814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.427397] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507956, 'name': PowerOnVM_Task, 'duration_secs': 0.528203} completed successfully. 
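
The CopyVirtualDisk_Task above copies the cached image VMDK into the new instance's directory and is then polled until completion. A hedged sketch of that call through oslo.vmware's public session API follows; session (an established oslo_vmware.api.VMwareAPISession) and dc_ref (the datacenter managed-object reference) are assumed to exist, and the keyword arguments mirror the vSphere VirtualDiskManager API rather than Nova's own wrapper.

# Hedged sketch, assuming an existing VMwareAPISession 'session' and datacenter ref 'dc_ref'.
def copy_cached_vmdk(session, dc_ref, source_ds_path, dest_ds_path):
    disk_mgr = session.vim.service_content.virtualDiskManager
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                              sourceName=source_ds_path,
                              sourceDatacenter=dc_ref,
                              destName=dest_ds_path,
                              destDatacenter=dc_ref)
    # wait_for_task polls vCenter (the "progress is N%" lines) and raises on failure.
    return session.wait_for_task(task)
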
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.427512] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.428211] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Took 8.79 seconds to spawn the instance on the hypervisor. [ 760.428211] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 760.428858] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75496807-a0b9-48d2-8362-06cfb19fe7f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.460179] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.460252] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.460398] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 760.505257] env[69475]: DEBUG nova.objects.instance [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lazy-loading 'flavor' on Instance uuid 00ba5cd8-3516-4059-bcda-c2d01e165e07 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 760.660046] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507957, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.690156] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507955, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.714454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11323676-c329-461d-b584-43e8e812420e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.724068] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29becc50-a95b-429f-8c98-08c08b6a3ff8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.761537] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68bf54c6-d22d-4b91-b9e7-5c6a4c2454f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.771523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c59c27-12d1-405e-ac12-2a7f99b698b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.788890] env[69475]: DEBUG nova.compute.provider_tree [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.956203] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Took 60.56 seconds to build instance. [ 761.010036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.010036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.018659] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 761.158737] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735249} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.158737] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 712e93b6-e797-4b9f-b39b-33373cede403/712e93b6-e797-4b9f-b39b-33373cede403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 761.159916] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 761.160172] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4ca4b5f-de4c-4a38-801a-736843cd2a02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.168599] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 761.168599] env[69475]: value = "task-3507958" [ 761.168599] env[69475]: _type = "Task" [ 761.168599] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.181461] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.188470] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507955, 'name': CloneVM_Task, 'duration_secs': 1.68273} completed successfully. 
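
The "Extending root virtual disk to 1048576" step is just the m1.nano flavor's 1 GB root disk expressed in KiB, which appears to be the unit the extend call takes here:

# 1 GiB root disk, expressed in KiB, matches the logged extend target.
root_gb = 1
requested_size_kb = root_gb * 1024 * 1024
assert requested_size_kb == 1048576
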
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.188754] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Created linked-clone VM from snapshot [ 761.189642] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1305043a-689a-4532-9c67-94187176f562 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.199141] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Uploading image a7eee3fc-bdd8-4636-932c-95c9509210a5 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 761.235097] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 761.235097] env[69475]: value = "vm-700960" [ 761.235097] env[69475]: _type = "VirtualMachine" [ 761.235097] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 761.239735] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4ffda6a4-5b79-40db-b9d9-8b162eff051c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.248034] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease: (returnval){ [ 761.248034] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52713828-70a9-4480-7c32-390d715b17f0" [ 761.248034] env[69475]: _type = "HttpNfcLease" [ 761.248034] env[69475]: } obtained for exporting VM: (result){ [ 761.248034] env[69475]: value = "vm-700960" [ 761.248034] env[69475]: _type = "VirtualMachine" [ 761.248034] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 761.248390] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the lease: (returnval){ [ 761.248390] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52713828-70a9-4480-7c32-390d715b17f0" [ 761.248390] env[69475]: _type = "HttpNfcLease" [ 761.248390] env[69475]: } to be ready. 
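
The snapshot upload above follows the standard vSphere export handshake: ExportVm returns an HttpNfcLease, the caller waits for the lease to become ready, and the lease info lists the HTTPS device URLs from which the VMDK can be streamed (the disk-0.vmdk URL that appears a few lines later). A hedged sketch of that handshake via oslo.vmware follows; session and vm_ref are assumed to exist, and lease progress updates and error handling are omitted.

# Hedged sketch of the ExportVm / HttpNfcLease handshake, assuming 'session' and 'vm_ref'.
from oslo_vmware import vim_util

def export_vmdk_urls(session, vm_ref):
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)        # "Lease ... is ready." in the log
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    # One deviceUrl per exported disk; the log reads disk-0.vmdk from the first.
    return [dev.url for dev in lease_info.deviceUrl]
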
{{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 761.256013] env[69475]: DEBUG nova.network.neutron [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Updating instance_info_cache with network_info: [{"id": "74bc91c0-20e1-4de1-8433-333a88443441", "address": "fa:16:3e:88:95:5f", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bc91c0-20", "ovs_interfaceid": "74bc91c0-20e1-4de1-8433-333a88443441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.262126] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 761.262126] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52713828-70a9-4480-7c32-390d715b17f0" [ 761.262126] env[69475]: _type = "HttpNfcLease" [ 761.262126] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 761.293708] env[69475]: DEBUG nova.scheduler.client.report [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 761.458992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.762s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.594941] env[69475]: DEBUG nova.network.neutron [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 761.681212] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067058} completed successfully. 
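
For the inventory reported above, placement's usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick check against the logged numbers:

# Values copied from the inventory line above.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 89},
}
for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(rc, 'capacity:', capacity, 'largest single allocation:', inv['max_unit'])
# VCPU capacity: 192.0, MEMORY_MB capacity: 196078.0, DISK_GB capacity: 400.0
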
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 761.681212] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 761.682235] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdb0bac9-bba4-4ae6-8f3d-89687cffd4ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.708060] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] 712e93b6-e797-4b9f-b39b-33373cede403/712e93b6-e797-4b9f-b39b-33373cede403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 761.708599] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4cf817c-e3b8-4f3f-ad15-974a62ef2514 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.735577] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 761.735577] env[69475]: value = "task-3507960" [ 761.735577] env[69475]: _type = "Task" [ 761.735577] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.744238] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507960, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.757907] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 761.757907] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52713828-70a9-4480-7c32-390d715b17f0" [ 761.757907] env[69475]: _type = "HttpNfcLease" [ 761.757907] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 761.758383] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 761.758715] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Instance network_info: |[{"id": "74bc91c0-20e1-4de1-8433-333a88443441", "address": "fa:16:3e:88:95:5f", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bc91c0-20", "ovs_interfaceid": "74bc91c0-20e1-4de1-8433-333a88443441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 761.759023] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 761.759023] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52713828-70a9-4480-7c32-390d715b17f0" [ 761.759023] env[69475]: _type = "HttpNfcLease" [ 761.759023] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 761.759445] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:95:5f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ada35c98-01a9-4352-98e4-1d20ba31f928', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '74bc91c0-20e1-4de1-8433-333a88443441', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 761.767731] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 761.768525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5df73e-e0dc-444a-8b8b-27585c9e80fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.771295] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 761.771884] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-668fa021-db09-44ae-8a77-b8433161335f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.795487] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 761.795717] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk for reading. 
{{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 761.798646] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.789s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.799155] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 761.801613] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 761.801613] env[69475]: value = "task-3507961" [ 761.801613] env[69475]: _type = "Task" [ 761.801613] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 761.802650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.963s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.804302] env[69475]: INFO nova.compute.claims [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.878296] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507961, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.911999] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5b77596b-a14e-4ea2-ae1f-4aeac8226d10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.965839] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.966102] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.966388] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 762.046225] env[69475]: DEBUG nova.compute.manager [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-changed-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 762.046544] env[69475]: DEBUG nova.compute.manager [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing instance network info cache due to event network-changed-dd8084ea-8138-439f-a367-0e57562094f5. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 762.046791] env[69475]: DEBUG oslo_concurrency.lockutils [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.193153] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Successfully updated port: 4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 762.247803] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.307517] env[69475]: DEBUG nova.compute.manager [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Received event network-changed-74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 762.307709] env[69475]: DEBUG nova.compute.manager [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Refreshing instance network info cache due to event network-changed-74bc91c0-20e1-4de1-8433-333a88443441. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 762.307945] env[69475]: DEBUG oslo_concurrency.lockutils [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] Acquiring lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.308269] env[69475]: DEBUG oslo_concurrency.lockutils [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] Acquired lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.308476] env[69475]: DEBUG nova.network.neutron [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Refreshing network info cache for port 74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 762.318815] env[69475]: DEBUG nova.compute.utils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 762.326961] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 762.328127] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 762.345527] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507961, 'name': CreateVM_Task, 'duration_secs': 0.465019} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.345750] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 762.346756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.346992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.348155] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 762.348263] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d99ae37-ebec-4821-86c4-4df8a3a48f2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.354034] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 762.354034] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e54dd-4c30-94a4-720b-9c81774cca96" [ 762.354034] env[69475]: _type = "Task" [ 762.354034] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.369951] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e54dd-4c30-94a4-720b-9c81774cca96, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.370338] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.370656] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 762.370882] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.371574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.371715] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 762.372315] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cbfd8e7c-4dcf-441e-9560-e3bc36474fad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.383784] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 762.383784] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 762.384450] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79e741e2-eaad-4d8f-93c0-b67f510c194c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.391964] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 762.391964] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52380769-645f-0b16-8df7-948f0900492a" [ 762.391964] env[69475]: _type = "Task" [ 762.391964] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.399497] env[69475]: DEBUG nova.policy [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '411c7e7bcdee495697199023e10202fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '47bcbe5bc3a14fbf9ea9617ea7d50342', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 762.408573] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52380769-645f-0b16-8df7-948f0900492a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.494859] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.553234] env[69475]: DEBUG nova.network.neutron [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.698762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.698762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.698762] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.751566] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 
tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507960, 'name': ReconfigVM_Task, 'duration_secs': 0.564678} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.752208] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Reconfigured VM instance instance-0000002a to attach disk [datastore1] 712e93b6-e797-4b9f-b39b-33373cede403/712e93b6-e797-4b9f-b39b-33373cede403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 762.753042] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a00c2376-9757-4aab-9482-31d5098542a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.762937] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 762.762937] env[69475]: value = "task-3507962" [ 762.762937] env[69475]: _type = "Task" [ 762.762937] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.786846] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507962, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.832737] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 762.915821] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52380769-645f-0b16-8df7-948f0900492a, 'name': SearchDatastore_Task, 'duration_secs': 0.016369} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.916790] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d83b8285-a762-4817-9190-9c13868856b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.928818] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 762.928818] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5254bb0a-1176-82ae-5ac9-bc3468d25dc4" [ 762.928818] env[69475]: _type = "Task" [ 762.928818] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.944413] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5254bb0a-1176-82ae-5ac9-bc3468d25dc4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.021761] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Successfully created port: 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 763.056274] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.057642] env[69475]: DEBUG nova.compute.manager [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Inject network info {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 763.057642] env[69475]: DEBUG nova.compute.manager [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] network_info to inject: |[{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 763.063660] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 
tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfiguring VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 763.064159] env[69475]: DEBUG oslo_concurrency.lockutils [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.064159] env[69475]: DEBUG nova.network.neutron [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing network info cache for port dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.066077] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aad672b-199a-439c-93aa-20679f95fdf3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.097317] env[69475]: DEBUG oslo_vmware.api [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 763.097317] env[69475]: value = "task-3507963" [ 763.097317] env[69475]: _type = "Task" [ 763.097317] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.112576] env[69475]: DEBUG oslo_vmware.api [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507963, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.183023] env[69475]: DEBUG nova.network.neutron [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Updated VIF entry in instance network info cache for port 74bc91c0-20e1-4de1-8433-333a88443441. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.183603] env[69475]: DEBUG nova.network.neutron [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Updating instance_info_cache with network_info: [{"id": "74bc91c0-20e1-4de1-8433-333a88443441", "address": "fa:16:3e:88:95:5f", "network": {"id": "eb5b4354-900c-49ff-b7a0-136b394c6750", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1780636412-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "ed51355b6daf4d4689f27ee4b8208618", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ada35c98-01a9-4352-98e4-1d20ba31f928", "external-id": "nsx-vlan-transportzone-242", "segmentation_id": 242, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap74bc91c0-20", "ovs_interfaceid": "74bc91c0-20e1-4de1-8433-333a88443441", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.248311] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.276536] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507962, 'name': Rename_Task, 'duration_secs': 0.271726} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.280526] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 763.283734] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a30dbeb6-906d-4eb7-a274-2e364dbd3468 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.292840] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 763.292840] env[69475]: value = "task-3507964" [ 763.292840] env[69475]: _type = "Task" [ 763.292840] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.310637] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507964, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.440993] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5254bb0a-1176-82ae-5ac9-bc3468d25dc4, 'name': SearchDatastore_Task, 'duration_secs': 0.015848} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.441294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.441970] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] daef2117-0d9f-4c9e-99e7-1e8a65aa1f22/daef2117-0d9f-4c9e-99e7-1e8a65aa1f22.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 763.442370] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cc4df2a5-d01b-4713-8d6f-6b0fb1beb23f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.454222] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 763.454222] env[69475]: value = "task-3507965" [ 763.454222] env[69475]: _type = "Task" [ 763.454222] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.464931] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507965, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.536525] env[69475]: DEBUG nova.network.neutron [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.611398] env[69475]: DEBUG nova.objects.instance [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lazy-loading 'flavor' on Instance uuid 00ba5cd8-3516-4059-bcda-c2d01e165e07 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 763.622475] env[69475]: DEBUG oslo_vmware.api [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507963, 'name': ReconfigVM_Task, 'duration_secs': 0.224006} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.623270] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d55850ea-058d-4364-87fd-e58f3cfb46a9 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfigured VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 763.639855] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c80130-4da6-4552-9f1b-ad8579d79b37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.648352] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f771acdf-eb58-4a86-b3b3-d964e2497e5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.683727] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e267993-8f6a-48e7-a5f7-78e6141a91f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.687984] env[69475]: DEBUG oslo_concurrency.lockutils [req-4288e66d-d90c-4038-9da5-0eb77070cb85 req-41a4947b-9ca2-43e3-b570-309e6673006a service nova] Releasing lock "refresh_cache-daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.693210] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ad81f7-644d-4c77-985a-74e39a93378f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.715090] env[69475]: DEBUG nova.compute.provider_tree [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 763.804056] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507964, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.850021] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.883620] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 763.884131] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 763.884535] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None 
req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 763.886084] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 763.888422] env[69475]: DEBUG nova.virt.hardware [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 763.890022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9861b9-8a21-4a99-a66d-04c611e02d12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.901473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9e29ce3-8bfa-4a15-b6c6-4ecbcbf3c020 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.965992] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507965, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.041983] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.041983] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Instance network_info: |[{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 764.041983] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:8b:99', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4059da75-efc8-42ee-90b1-8202220d1621', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.054873] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating folder: Project (de2b24bdabce45a7884bdce4ed781c79). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.055259] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44769db1-0df6-45c0-8acc-858297133e29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.067582] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created folder: Project (de2b24bdabce45a7884bdce4ed781c79) in parent group-v700823. [ 764.067582] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating folder: Instances. Parent ref: group-v700962. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 764.067949] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6863cddc-520d-4b2b-9007-118b4e46ecfd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.077817] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created folder: Instances in parent group-v700962. [ 764.077962] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 764.078150] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.078365] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-95dfd5ef-e399-4b85-8bf9-6bcb8ffec5fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.108444] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.108444] env[69475]: value = "task-3507968" [ 764.108444] env[69475]: _type = "Task" [ 764.108444] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.118253] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507968, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.126219] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.220317] env[69475]: DEBUG nova.scheduler.client.report [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 764.227516] env[69475]: DEBUG nova.network.neutron [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updated VIF entry in instance network info cache for port dd8084ea-8138-439f-a367-0e57562094f5. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 764.227516] env[69475]: DEBUG nova.network.neutron [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.305591] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: 
{'id': task-3507964, 'name': PowerOnVM_Task, 'duration_secs': 0.713899} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.306355] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 764.306588] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Took 9.86 seconds to spawn the instance on the hypervisor. [ 764.306780] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 764.307607] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4bed4f-db86-4c13-ac32-b55b783cc728 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.356477] env[69475]: DEBUG nova.compute.manager [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Received event network-vif-plugged-4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.356675] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.356911] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.357670] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.357670] env[69475]: DEBUG nova.compute.manager [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] No waiting events found dispatching network-vif-plugged-4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 764.357923] env[69475]: WARNING nova.compute.manager [req-88d529cd-8cad-4d22-bb8f-46e6390deaac 
req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Received unexpected event network-vif-plugged-4059da75-efc8-42ee-90b1-8202220d1621 for instance with vm_state building and task_state spawning. [ 764.358073] env[69475]: DEBUG nova.compute.manager [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Received event network-changed-4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.358367] env[69475]: DEBUG nova.compute.manager [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Refreshing instance network info cache due to event network-changed-4059da75-efc8-42ee-90b1-8202220d1621. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 764.358519] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.358714] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.359736] env[69475]: DEBUG nova.network.neutron [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Refreshing network info cache for port 4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.465586] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507965, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.619836] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507968, 'name': CreateVM_Task, 'duration_secs': 0.501185} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.620098] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.621268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.621544] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.621966] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 764.622265] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a25e3169-aeb4-4dc6-b128-a69eac7cd383 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.627805] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 764.627805] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f044f-3282-9198-b7fc-ef618e5e029c" [ 764.627805] env[69475]: _type = "Task" [ 764.627805] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.636254] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f044f-3282-9198-b7fc-ef618e5e029c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.729947] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.927s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.730477] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 764.736528] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.558s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.736528] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 764.737961] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.765s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.739613] env[69475]: INFO nova.compute.claims [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 764.744680] env[69475]: DEBUG oslo_concurrency.lockutils [req-c75c15f4-e397-445d-a0cd-30e23c553a27 req-70d2c269-7b42-4eb3-904b-00101e65e289 service nova] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.745377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.771114] env[69475]: INFO nova.scheduler.client.report [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocations for instance ed12921f-9be8-474d-958e-79dd16b8116e [ 764.833030] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Took 58.24 seconds to build instance. [ 764.971018] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507965, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.019342] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Successfully updated port: 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 765.113495] env[69475]: DEBUG nova.network.neutron [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.144147] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f044f-3282-9198-b7fc-ef618e5e029c, 'name': SearchDatastore_Task, 'duration_secs': 0.013932} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.148180] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.148180] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 765.148557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.148842] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.149136] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 765.150388] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89aaaf7a-a6f7-4a97-8e66-7a035d359c92 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.166475] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 765.166475] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 765.166475] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b5230a0-c7f8-4989-9c2c-28215a0f1c55 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.176196] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 765.176196] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a0834a-cee5-57fe-9a6a-6bf7c84ea472" [ 765.176196] env[69475]: _type = "Task" [ 765.176196] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.184760] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a0834a-cee5-57fe-9a6a-6bf7c84ea472, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.191814] env[69475]: DEBUG nova.compute.manager [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-changed-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 765.191814] env[69475]: DEBUG nova.compute.manager [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing instance network info cache due to event network-changed-dd8084ea-8138-439f-a367-0e57562094f5. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 765.191814] env[69475]: DEBUG oslo_concurrency.lockutils [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] Acquiring lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.247126] env[69475]: DEBUG nova.compute.utils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 765.253022] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 765.253022] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 765.266027] env[69475]: DEBUG nova.network.neutron [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updated VIF entry in instance network info cache for port 4059da75-efc8-42ee-90b1-8202220d1621. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.266379] env[69475]: DEBUG nova.network.neutron [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.279346] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d1d5ffa7-5689-43cf-a93b-1bccaaa86a82 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "ed12921f-9be8-474d-958e-79dd16b8116e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.978s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.298082] env[69475]: DEBUG nova.policy [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42c54237c534486d86b3a161149fd013', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e760df406d80477a9a7c4d345093d3db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 765.335936] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.593s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.472029] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': 
task-3507965, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.685845} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.472029] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] daef2117-0d9f-4c9e-99e7-1e8a65aa1f22/daef2117-0d9f-4c9e-99e7-1e8a65aa1f22.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 765.472029] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 765.472312] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f381a83f-6d1f-4366-8d8f-6271c0bc018a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.479519] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 765.479519] env[69475]: value = "task-3507969" [ 765.479519] env[69475]: _type = "Task" [ 765.479519] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.491379] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507969, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.521947] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.522116] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 765.522274] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 765.624798] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Successfully created port: 5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 765.690200] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a0834a-cee5-57fe-9a6a-6bf7c84ea472, 'name': SearchDatastore_Task, 'duration_secs': 0.013791} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.691074] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5bf77f58-6aa0-4aa1-a06b-427d5334fdc4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.696838] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 765.696838] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521da437-aec2-951f-39f0-ac9b526acca7" [ 765.696838] env[69475]: _type = "Task" [ 765.696838] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.707430] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521da437-aec2-951f-39f0-ac9b526acca7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.752557] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 765.769389] env[69475]: DEBUG oslo_concurrency.lockutils [req-88d529cd-8cad-4d22-bb8f-46e6390deaac req-441cb3a2-3f20-403f-97dd-3d8ecc794a58 service nova] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 765.841670] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 765.996105] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507969, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.192416} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.996423] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 765.997254] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31413c2c-5b33-4a63-81dd-adbd7ce8f5b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.021773] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] daef2117-0d9f-4c9e-99e7-1e8a65aa1f22/daef2117-0d9f-4c9e-99e7-1e8a65aa1f22.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 766.027782] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0732607b-2863-421d-9882-30d187679344 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.052153] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 766.052153] env[69475]: value = "task-3507970" [ 766.052153] env[69475]: _type = "Task" [ 766.052153] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.063401] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507970, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.123788] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 766.142274] env[69475]: DEBUG nova.network.neutron [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.213367] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521da437-aec2-951f-39f0-ac9b526acca7, 'name': SearchDatastore_Task, 'duration_secs': 0.021946} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.214232] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.214910] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.215033] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-14a28668-c4c8-4d71-9633-0b157d7bcee5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.230142] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 766.230142] env[69475]: value = "task-3507971" [ 766.230142] env[69475]: _type = "Task" [ 766.230142] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.239429] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507971, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.332953] env[69475]: DEBUG nova.network.neutron [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.380793] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.440481] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf831f53-5fc4-4bc8-a136-a6d74e61b1e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.449096] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90a40d5a-6502-4e7c-95bf-8af20190e3fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.487267] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580ace92-1d85-4a7b-ae74-8f8fd7baacce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.497051] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d433bb94-feaa-4b3b-a3e3-1c9a729be23d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.512932] env[69475]: DEBUG nova.compute.provider_tree [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.565953] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507970, 'name': ReconfigVM_Task, 'duration_secs': 0.397033} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.565953] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Reconfigured VM instance instance-0000002b to attach disk [datastore1] daef2117-0d9f-4c9e-99e7-1e8a65aa1f22/daef2117-0d9f-4c9e-99e7-1e8a65aa1f22.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 766.566540] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75a8703f-f5cc-4270-bfdc-50436c7740d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.573656] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 766.573656] env[69475]: value = "task-3507972" [ 766.573656] env[69475]: _type = "Task" [ 766.573656] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.583587] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507972, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.644957] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.645265] env[69475]: DEBUG nova.compute.manager [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Inject network info {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 766.645571] env[69475]: DEBUG nova.compute.manager [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] network_info to inject: |[{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 766.651266] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfiguring VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 766.651715] env[69475]: DEBUG oslo_concurrency.lockutils [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] Acquired lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.651943] env[69475]: DEBUG nova.network.neutron [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Refreshing network info cache for port dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 766.653394] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d5fafcc-2c4b-4aa5-b9aa-1507a0aa28e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.672602] env[69475]: DEBUG oslo_vmware.api [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 766.672602] env[69475]: value = "task-3507973" [ 766.672602] env[69475]: _type = "Task" [ 766.672602] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.682316] env[69475]: DEBUG oslo_vmware.api [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507973, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.742407] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507971, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.754999] env[69475]: DEBUG nova.compute.manager [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received event network-vif-plugged-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.754999] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Acquiring lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.755222] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.755456] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.755674] env[69475]: DEBUG nova.compute.manager [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] No waiting events found dispatching network-vif-plugged-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.755847] env[69475]: WARNING nova.compute.manager 
[req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received unexpected event network-vif-plugged-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 for instance with vm_state building and task_state spawning. [ 766.756214] env[69475]: DEBUG nova.compute.manager [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.756330] env[69475]: DEBUG nova.compute.manager [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing instance network info cache due to event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 766.756558] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Acquiring lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.764493] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 766.794362] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 766.794362] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 766.794549] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 766.794682] env[69475]: DEBUG 
nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 766.794824] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 766.794968] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 766.795207] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 766.795396] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 766.795570] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 766.795741] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 766.795920] env[69475]: DEBUG nova.virt.hardware [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 766.797184] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e17126c-b377-49c3-a59f-ff204c55810b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.805436] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41664485-c849-4a7e-9724-281996a73791 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.836363] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 
tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.837556] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Instance network_info: |[{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.837556] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Acquired lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.837556] env[69475]: DEBUG nova.network.neutron [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 766.838410] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:09:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7b155c9d-5e5c-499f-bfd8-a2c59e674bc2', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.846202] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.847101] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.847961] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ceeff291-219b-4299-ade0-5372268e730a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.871626] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.871626] env[69475]: value = "task-3507974" [ 766.871626] env[69475]: _type = "Task" [ 766.871626] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.880801] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507974, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.945779] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.946151] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.016546] env[69475]: DEBUG nova.scheduler.client.report [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 767.084750] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507972, 'name': Rename_Task, 'duration_secs': 0.18307} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.085072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 767.085344] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-26c1d2c8-9be6-483b-bb5c-6bbc544ed524 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.095056] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 767.095056] env[69475]: value = "task-3507975" [ 767.095056] env[69475]: _type = "Task" [ 767.095056] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.107933] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.163458] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Successfully updated port: 5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 767.184788] env[69475]: DEBUG oslo_vmware.api [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507973, 'name': ReconfigVM_Task, 'duration_secs': 0.169032} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.185269] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c61e3914-b2a6-41f0-8db2-a5c6324a9bf1 tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Reconfigured VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 767.224172] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.225031] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.225031] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.225031] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.225270] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.229399] env[69475]: INFO nova.compute.manager [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Terminating instance [ 767.243689] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507971, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589551} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.244937] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.245164] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.245551] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0503c5b-f75d-4ad5-930d-0d0e4d187592 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.254468] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 767.254468] env[69475]: value = "task-3507976" [ 767.254468] env[69475]: _type = "Task" [ 767.254468] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.266524] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507976, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.307375] env[69475]: DEBUG nova.compute.manager [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Received event network-vif-plugged-5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.307375] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Acquiring lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 767.307375] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.307799] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.307799] env[69475]: DEBUG nova.compute.manager [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] No waiting events found dispatching network-vif-plugged-5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 767.307799] env[69475]: WARNING nova.compute.manager [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Received unexpected event network-vif-plugged-5283f252-4c4d-4aaa-81d9-5fccc6edff8d for instance with vm_state building and task_state spawning. [ 767.308302] env[69475]: DEBUG nova.compute.manager [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Received event network-changed-5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 767.308302] env[69475]: DEBUG nova.compute.manager [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Refreshing instance network info cache due to event network-changed-5283f252-4c4d-4aaa-81d9-5fccc6edff8d. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 767.308302] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Acquiring lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.308494] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Acquired lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.308616] env[69475]: DEBUG nova.network.neutron [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Refreshing network info cache for port 5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.384679] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507974, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.459372] env[69475]: DEBUG nova.network.neutron [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updated VIF entry in instance network info cache for port dd8084ea-8138-439f-a367-0e57562094f5. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 767.459820] env[69475]: DEBUG nova.network.neutron [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [{"id": "dd8084ea-8138-439f-a367-0e57562094f5", "address": "fa:16:3e:be:9d:ab", "network": {"id": "e8c8708c-443c-4213-8716-88b361366d50", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-13457515-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.199", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d041345f126f4ad69469a2771e411ce9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd8084ea-81", "ovs_interfaceid": "dd8084ea-8138-439f-a367-0e57562094f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.525024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.785s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 767.525024] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 767.526896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.236s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 767.528613] env[69475]: INFO nova.compute.claims [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 767.610166] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507975, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.658776] env[69475]: DEBUG nova.network.neutron [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updated VIF entry in instance network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 767.659148] env[69475]: DEBUG nova.network.neutron [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.667174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.737092] env[69475]: DEBUG nova.compute.manager [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 767.737396] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.738656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67017f6d-9c25-48c8-90dd-af68a44ffc2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.747352] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 767.747635] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f8959d4-a685-4b7a-9636-3a4b015f5ca5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.754734] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 767.754734] env[69475]: value = "task-3507977" [ 767.754734] env[69475]: _type = "Task" [ 767.754734] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.767697] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507977, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.771527] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507976, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078573} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.771831] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.772667] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f479481-dac4-49f3-b4e8-ac88d1c7f574 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.797175] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.797582] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0197cce6-2cc9-466f-bfca-b1401be11923 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.819702] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 767.819702] env[69475]: value = "task-3507978" [ 767.819702] env[69475]: _type = "Task" [ 767.819702] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.829101] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507978, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.858917] env[69475]: DEBUG nova.network.neutron [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 767.884136] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507974, 'name': CreateVM_Task, 'duration_secs': 0.575448} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.884323] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 767.885030] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 767.885202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 767.885525] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 767.885787] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f8cc726-7913-4920-a6ea-62949c4cbe8b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.890516] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 767.890516] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b636a1-ab34-0e0b-8feb-6d4af298dea5" [ 767.890516] env[69475]: _type = "Task" [ 767.890516] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.899275] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b636a1-ab34-0e0b-8feb-6d4af298dea5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.962981] env[69475]: DEBUG oslo_concurrency.lockutils [req-a6c9f51d-6764-44a3-8497-e60a672eabb3 req-2b0282e2-1ddc-4fb4-937f-5c961c13700b service nova] Releasing lock "refresh_cache-00ba5cd8-3516-4059-bcda-c2d01e165e07" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.033565] env[69475]: DEBUG nova.compute.utils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 768.041697] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 768.045024] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 768.081414] env[69475]: DEBUG nova.network.neutron [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.090566] env[69475]: DEBUG nova.policy [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1093b71068124191a1119ade8f913bed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c76e3845a6d64757b175062c3e2c6198', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 768.107756] env[69475]: DEBUG oslo_vmware.api [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507975, 'name': PowerOnVM_Task, 'duration_secs': 0.710814} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.108456] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 768.108456] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Took 10.86 seconds to spawn the instance on the hypervisor. [ 768.108456] env[69475]: DEBUG nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 768.109088] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126c4994-e385-4490-a9c2-c2a532dbbf6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.164360] env[69475]: DEBUG oslo_concurrency.lockutils [req-774c7b9f-5b96-42c4-aabc-13bfc610f856 req-9bd7557a-fcd4-49d9-a265-d8d417ffdac6 service nova] Releasing lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.265658] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507977, 'name': PowerOffVM_Task, 'duration_secs': 0.270158} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.265955] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 768.266205] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 768.266488] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12a072f0-50ab-4e61-b075-ac032906cd08 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.333347] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507978, 'name': ReconfigVM_Task, 'duration_secs': 0.497631} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.334394] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfigured VM instance instance-0000002c to attach disk [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 768.335824] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b35a5b3-8212-42bb-9369-8b8d56278f63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.337788] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 768.338085] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 768.338189] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Deleting the datastore file [datastore1] 00ba5cd8-3516-4059-bcda-c2d01e165e07 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 768.338532] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b49a93c3-739d-4d12-b19b-afc965903c61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.346223] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 768.346223] env[69475]: value = "task-3507980" [ 768.346223] env[69475]: _type = "Task" [ 768.346223] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.347816] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for the task: (returnval){ [ 768.347816] env[69475]: value = "task-3507981" [ 768.347816] env[69475]: _type = "Task" [ 768.347816] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.359668] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507980, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.366191] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507981, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.408061] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b636a1-ab34-0e0b-8feb-6d4af298dea5, 'name': SearchDatastore_Task, 'duration_secs': 0.014807} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.408887] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.409622] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 768.410038] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.410264] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.410523] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 768.410930] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-635bb58d-f43e-4133-9104-5d28017ed8cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.429598] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 768.429940] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 768.431064] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f7799bc-6e8c-4118-aaa1-1b93a9446643 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.437322] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 768.437322] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aeb70-3eda-be69-d75c-fd6c5f260c3f" [ 768.437322] env[69475]: _type = "Task" [ 768.437322] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.443031] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Successfully created port: eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 768.448705] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aeb70-3eda-be69-d75c-fd6c5f260c3f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.543032] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 768.584221] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcc71e4c-265f-4734-8c6b-820f20083c37 req-a4e3e2d5-469d-4537-9b0a-00688ab65a94 service nova] Releasing lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.584776] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.585240] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 768.627438] env[69475]: INFO nova.compute.manager [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Took 59.10 seconds to build instance. [ 768.867657] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507980, 'name': Rename_Task, 'duration_secs': 0.200309} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.867940] env[69475]: DEBUG oslo_vmware.api [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Task: {'id': task-3507981, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223306} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.870803] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 768.871098] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 768.871283] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 768.871484] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 768.871666] env[69475]: INFO nova.compute.manager [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Took 1.13 seconds to destroy the instance on the hypervisor. [ 768.871900] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 768.872432] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-24c36c85-a068-4c82-81e4-85eb115b1a45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.874121] env[69475]: DEBUG nova.compute.manager [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 768.874230] env[69475]: DEBUG nova.network.neutron [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 768.880637] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 768.880637] env[69475]: value = "task-3507982" [ 768.880637] env[69475]: _type = "Task" [ 768.880637] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.893212] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507982, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.951123] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aeb70-3eda-be69-d75c-fd6c5f260c3f, 'name': SearchDatastore_Task, 'duration_secs': 0.01294} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.952057] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4550979-a96f-42d0-80dc-0197809991e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.960356] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 768.960356] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52108cb6-8f21-39c0-d461-428a632ff0d6" [ 768.960356] env[69475]: _type = "Task" [ 768.960356] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.969219] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52108cb6-8f21-39c0-d461-428a632ff0d6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.049380] env[69475]: INFO nova.virt.block_device [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Booting with volume f89046dd-6d18-4fc2-bdc5-f7976aa2861d at /dev/sda [ 769.112080] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2a7d9915-b225-49a3-a862-50bbda22d23e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.123974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ade4f4-8e7e-41f7-86a5-2e4b4ad42c89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.144026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f07fc508-20dd-410f-9608-ffd78f28ebac tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 100.362s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 769.179152] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e1d44450-c1d0-4e98-b014-eaa346a58284 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.182185] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a8c47b-697d-4941-8d15-fab599886115 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.192685] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735d9df8-9813-4bd0-91d0-1e79046c113d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.198860] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c7718fb-5dec-45c1-b9ff-256ea124b4c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.254709] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1360d169-e039-441b-8576-dbcfb990f58a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.258116] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfb46a6-27ce-4c23-964f-d186a31f4a74 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.267494] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d493937d-ad64-43ee-93ba-312fce04b19c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.272592] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c13021-ff5b-47f5-96a7-278ab7682562 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.288055] env[69475]: DEBUG nova.compute.provider_tree [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 769.290112] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 769.295428] env[69475]: DEBUG nova.virt.block_device [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating existing volume attachment record: de8e7333-e7d9-476f-981d-83b361f13ee8 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 769.395870] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507982, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.478025] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52108cb6-8f21-39c0-d461-428a632ff0d6, 'name': SearchDatastore_Task, 'duration_secs': 0.01639} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.478025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 769.478025] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] df73dd41-7455-4482-abb2-b61b26fcf403/df73dd41-7455-4482-abb2-b61b26fcf403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 769.478025] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29a30c13-29e4-488b-b213-f754eba7d7a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.485695] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 769.485695] env[69475]: value = "task-3507983" [ 769.485695] env[69475]: _type = "Task" [ 769.485695] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 769.495744] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507983, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.652438] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 769.721932] env[69475]: DEBUG nova.network.neutron [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Updating instance_info_cache with network_info: [{"id": "5283f252-4c4d-4aaa-81d9-5fccc6edff8d", "address": "fa:16:3e:18:a5:69", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5283f252-4c", "ovs_interfaceid": "5283f252-4c4d-4aaa-81d9-5fccc6edff8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.802202] env[69475]: DEBUG nova.scheduler.client.report [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 769.893866] env[69475]: DEBUG oslo_vmware.api [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3507982, 'name': PowerOnVM_Task, 'duration_secs': 0.744314} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 769.894220] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 769.894495] env[69475]: INFO nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Took 9.88 seconds to spawn the instance on the hypervisor. 
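The PowerOnVM_Task entries above follow oslo.vmware's invoke-and-wait pattern: the driver invokes the vSphere task method, then blocks in wait_for_task, which polls the task and produces the "progress is N%" lines recorded here. A minimal Python sketch of that pattern, using placeholder vCenter credentials and a placeholder VM moref rather than values from this log:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder connection details; not taken from this log.
    session = vmware_api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Placeholder managed-object reference for the VM to power on.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Invoke the vSphere task method, then block until it completes;
    # wait_for_task polls the task object and logs its progress,
    # as in the PowerOnVM_Task progress/completion entries above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
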
[ 769.894602] env[69475]: DEBUG nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 769.895501] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced720f9-1da9-41ee-b6a9-798249a0d6a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.997654] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507983, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.043265] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Successfully updated port: eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.175128] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.225267] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "refresh_cache-a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.225629] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance network_info: |[{"id": "5283f252-4c4d-4aaa-81d9-5fccc6edff8d", "address": "fa:16:3e:18:a5:69", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5283f252-4c", "ovs_interfaceid": "5283f252-4c4d-4aaa-81d9-5fccc6edff8d", "qbh_params": null, "qbg_params": null, "active": 
true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 770.226109] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:18:a5:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5283f252-4c4d-4aaa-81d9-5fccc6edff8d', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 770.235465] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 770.235731] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 770.235964] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cabff03-5921-4af0-aa68-32f3c1887af7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.262016] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 770.262016] env[69475]: value = "task-3507984" [ 770.262016] env[69475]: _type = "Task" [ 770.262016] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.268518] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507984, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.309450] env[69475]: DEBUG nova.compute.manager [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Received event network-vif-plugged-eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.309450] env[69475]: DEBUG oslo_concurrency.lockutils [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] Acquiring lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.309450] env[69475]: DEBUG oslo_concurrency.lockutils [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.309450] env[69475]: DEBUG oslo_concurrency.lockutils [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.309913] env[69475]: DEBUG nova.compute.manager [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] No waiting events found dispatching network-vif-plugged-eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 770.310229] env[69475]: WARNING nova.compute.manager [req-f7346e9a-777f-49af-a9a5-d349d856e6c3 req-436de500-e059-4212-8e02-9c1cac31748b service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Received unexpected event network-vif-plugged-eb7198c7-072e-4cfe-bfdb-5306e3098955 for instance with vm_state building and task_state block_device_mapping. [ 770.311172] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.784s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.312036] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 770.318385] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.073s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.318385] env[69475]: DEBUG nova.objects.instance [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lazy-loading 'resources' on Instance uuid 4c2e12bf-3f16-47de-a604-44b62a6c7137 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 770.422341] env[69475]: INFO nova.compute.manager [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Took 49.92 seconds to build instance. [ 770.468270] env[69475]: DEBUG nova.compute.manager [req-7532e6fe-f3b5-4ba5-a8ed-3abf9841e52c req-2d7e43a0-c413-4043-b519-90aa10f3eae8 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Received event network-vif-deleted-dd8084ea-8138-439f-a367-0e57562094f5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 770.468485] env[69475]: INFO nova.compute.manager [req-7532e6fe-f3b5-4ba5-a8ed-3abf9841e52c req-2d7e43a0-c413-4043-b519-90aa10f3eae8 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Neutron deleted interface dd8084ea-8138-439f-a367-0e57562094f5; detaching it from the instance and deleting it from the info cache [ 770.468673] env[69475]: DEBUG nova.network.neutron [req-7532e6fe-f3b5-4ba5-a8ed-3abf9841e52c req-2d7e43a0-c413-4043-b519-90aa10f3eae8 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.498321] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507983, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592711} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.498693] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] df73dd41-7455-4482-abb2-b61b26fcf403/df73dd41-7455-4482-abb2-b61b26fcf403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 770.498959] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 770.499308] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6349cec1-7d85-4903-9b1e-82278e2315c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.507371] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 770.507371] env[69475]: value = "task-3507985" [ 770.507371] env[69475]: _type = "Task" [ 770.507371] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.513335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "3e332e28-5db5-4f04-8a47-95406da16e21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.515465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.515465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.515623] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.515800] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.524060] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507985, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.524060] env[69475]: INFO nova.compute.manager [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Terminating instance [ 770.545609] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.545779] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquired lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.545901] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.615945] env[69475]: DEBUG nova.network.neutron [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.771183] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507984, 'name': CreateVM_Task, 'duration_secs': 0.468691} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.771504] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.772294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.772492] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.772840] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.773140] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-511322f0-4955-4483-ada6-387908b72402 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.781508] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 770.781508] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fbbf2f-a9fd-681a-d2e7-d35493a253db" [ 770.781508] env[69475]: _type = "Task" [ 770.781508] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.792021] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fbbf2f-a9fd-681a-d2e7-d35493a253db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.820615] env[69475]: DEBUG nova.compute.utils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 770.827816] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 770.828019] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 770.897579] env[69475]: DEBUG nova.policy [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35701016696a4f57a1c34462e46e99d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02d595a3575a40799470947426047e69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 770.923543] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec187beb-9e9f-4080-9fde-915beefc3c50 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.554s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.971125] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3c8eef96-a2ed-4016-904f-70dc68eb4a66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.982837] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1da397-68ac-4941-9226-cedcf0c81fff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.024947] env[69475]: DEBUG nova.compute.manager [req-7532e6fe-f3b5-4ba5-a8ed-3abf9841e52c req-2d7e43a0-c413-4043-b519-90aa10f3eae8 service nova] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Detach interface failed, port_id=dd8084ea-8138-439f-a367-0e57562094f5, reason: Instance 00ba5cd8-3516-4059-bcda-c2d01e165e07 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 771.032114] env[69475]: DEBUG nova.compute.manager [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 771.032352] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 771.033899] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8011b76e-b803-41cc-9ec9-b452cd9bf7bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.041555] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507985, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.09097} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.043716] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 771.044875] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 771.044875] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3187ce54-79fe-43e7-9f48-7664bd64b617 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.047537] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b48ba3c9-98a3-43da-afca-30ffbe1fab65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.081020] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] df73dd41-7455-4482-abb2-b61b26fcf403/df73dd41-7455-4482-abb2-b61b26fcf403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 771.085078] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2079cf35-0c75-4d27-b827-64e6288aa0b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.102482] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 771.102482] env[69475]: value = "task-3507986" [ 771.102482] env[69475]: _type = "Task" [ 771.102482] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.104032] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.112669] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 771.112669] env[69475]: value = "task-3507987" [ 771.112669] env[69475]: _type = "Task" [ 771.112669] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.119733] env[69475]: INFO nova.compute.manager [-] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Took 2.25 seconds to deallocate network for instance. [ 771.120153] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507986, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.134701] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.291893] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fbbf2f-a9fd-681a-d2e7-d35493a253db, 'name': SearchDatastore_Task, 'duration_secs': 0.015674} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.292226] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.292710] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 771.292909] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.294289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.294289] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 771.295839] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-798f4535-e20d-415a-b2da-c022cfaed549 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.306722] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 771.308069] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7128f4d4-fa76-404c-8776-d51bb1abb760 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.314667] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 771.314861] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 771.322899] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7cf0ea6a-4346-4d89-bf7b-3d73ba8cb4a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.327421] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 771.327625] env[69475]: ERROR oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk due to incomplete transfer. [ 771.328550] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-a5abbc0c-8e33-4c51-bfb6-ccfdb4344d03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.331471] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 771.335833] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 771.335833] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528acf8d-f202-7654-2fb4-2d056339fdd3" [ 771.335833] env[69475]: _type = "Task" [ 771.335833] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.348236] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528acf8d-f202-7654-2fb4-2d056339fdd3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.349824] env[69475]: DEBUG oslo_vmware.rw_handles [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524fed73-e72c-a63f-fd38-d2046524b15d/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 771.350082] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Uploaded image a7eee3fc-bdd8-4636-932c-95c9509210a5 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 771.352883] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 771.353243] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b1e19d69-a4b7-441d-b3e2-5b9b63f2ba68 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.360258] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 771.360258] env[69475]: value = "task-3507988" [ 771.360258] env[69475]: _type = "Task" [ 771.360258] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.369946] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507988, 'name': Destroy_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.395460] env[69475]: DEBUG nova.network.neutron [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating instance_info_cache with network_info: [{"id": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "address": "fa:16:3e:0f:e3:ee", "network": {"id": "2dd39019-e771-409d-a18f-fe2ab0f534f9", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1698322861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76e3845a6d64757b175062c3e2c6198", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7198c7-07", "ovs_interfaceid": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.427515] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 771.428044] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 771.428260] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 771.428599] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 771.428848] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 771.428999] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 771.429164] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 771.429410] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 771.429520] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 771.429689] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Got 1 possible 
topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 771.429851] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 771.430104] env[69475]: DEBUG nova.virt.hardware [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 771.430912] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 771.433855] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f918eda6-9de8-4b40-abf4-2cc96e8d17f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.442500] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8e86af-0f0b-4057-8e1e-05cca458c3c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.473758] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Successfully created port: c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 771.581388] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab219caa-05a3-4861-973c-6d02f01e19e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.588975] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18d5fcad-0d49-4810-93d1-9a86e87cf30e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.629726] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17e3e1f-5ea5-425b-b9a6-f55951dfb5e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.633956] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.645356] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa97dc4a-21d6-412b-9c48-e878d5b24336 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.649552] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507986, 'name': PowerOffVM_Task, 'duration_secs': 0.542689} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.649781] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507987, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.650050] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 771.650224] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 771.650857] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c20ab214-5b39-4359-8eb4-66b7b46b395e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.660713] env[69475]: DEBUG nova.compute.provider_tree [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 771.720706] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 771.720938] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 771.721136] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 
tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleting the datastore file [datastore1] 3e332e28-5db5-4f04-8a47-95406da16e21 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 771.721480] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d57594e-e309-4363-a232-22391c0e2574 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.728429] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 771.728429] env[69475]: value = "task-3507990" [ 771.728429] env[69475]: _type = "Task" [ 771.728429] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.738240] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507990, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.850918] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528acf8d-f202-7654-2fb4-2d056339fdd3, 'name': SearchDatastore_Task, 'duration_secs': 0.013636} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.851872] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca89b0b4-49c8-403e-a6e0-2c819e4bcae7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.858096] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 771.858096] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52593460-d2a4-3b32-a314-28d964debeaf" [ 771.858096] env[69475]: _type = "Task" [ 771.858096] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.876824] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507988, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.877467] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52593460-d2a4-3b32-a314-28d964debeaf, 'name': SearchDatastore_Task, 'duration_secs': 0.012022} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.877467] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.877664] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] a75d7a92-4ac7-4fa0-90f0-f0a0993e881e/a75d7a92-4ac7-4fa0-90f0-f0a0993e881e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.877914] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df6fc840-9ba4-449c-9345-d47320d5c032 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.885282] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 771.885282] env[69475]: value = "task-3507991" [ 771.885282] env[69475]: _type = "Task" [ 771.885282] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.897020] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507991, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.898693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Releasing lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.899022] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Instance network_info: |[{"id": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "address": "fa:16:3e:0f:e3:ee", "network": {"id": "2dd39019-e771-409d-a18f-fe2ab0f534f9", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1698322861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76e3845a6d64757b175062c3e2c6198", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7198c7-07", "ovs_interfaceid": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 771.899508] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:e3:ee', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '74e6f6e0-95e6-4531-99e9-0e78350fb655', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eb7198c7-072e-4cfe-bfdb-5306e3098955', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.908499] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Creating folder: Project (c76e3845a6d64757b175062c3e2c6198). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.912037] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49155b61-fc15-4890-bca5-69d3b7f794df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.926515] env[69475]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 771.926693] env[69475]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69475) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 771.927111] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Folder already exists: Project (c76e3845a6d64757b175062c3e2c6198). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 771.927326] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Creating folder: Instances. Parent ref: group-v700887. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.927564] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e5f6daa-78c6-4bbf-ac84-671c6f6e6f89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.945522] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Created folder: Instances in parent group-v700887. [ 771.945818] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.950457] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.950457] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a990d300-4f09-420c-9f81-0467e352cdad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.968799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 771.978036] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.978036] env[69475]: value = "task-3507994" [ 771.978036] env[69475]: _type = "Task" [ 771.978036] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.985637] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507994, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.133448] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507987, 'name': ReconfigVM_Task, 'duration_secs': 0.838246} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.133765] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Reconfigured VM instance instance-0000002d to attach disk [datastore2] df73dd41-7455-4482-abb2-b61b26fcf403/df73dd41-7455-4482-abb2-b61b26fcf403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 772.134517] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-607bcebd-d65b-40b3-b476-2684c1f52da1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.144009] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 772.144009] env[69475]: value = "task-3507995" [ 772.144009] env[69475]: _type = "Task" [ 772.144009] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.157788] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507995, 'name': Rename_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.185411] env[69475]: ERROR nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] [req-4ba69574-645c-40ec-a454-579c824501ac] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4ba69574-645c-40ec-a454-579c824501ac"}]} [ 772.212376] env[69475]: DEBUG nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 772.234531] env[69475]: DEBUG nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 772.234815] env[69475]: DEBUG nova.compute.provider_tree [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 772.245308] env[69475]: DEBUG oslo_vmware.api [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3507990, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.245778} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.245745] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 772.246032] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 772.246307] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 772.246987] env[69475]: INFO nova.compute.manager [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Took 1.21 seconds to destroy the instance on the hypervisor. [ 772.246987] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 772.247238] env[69475]: DEBUG nova.compute.manager [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 772.247433] env[69475]: DEBUG nova.network.neutron [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 772.250706] env[69475]: DEBUG nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 772.280537] env[69475]: DEBUG nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 772.347280] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 772.378939] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507988, 'name': Destroy_Task, 'duration_secs': 0.643833} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.385085] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 772.385358] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 772.385525] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 772.385689] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 772.385838] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 772.385987] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 772.386219] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 772.386383] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 772.386552] env[69475]: DEBUG nova.virt.hardware [None 
req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 772.386796] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 772.386878] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 772.387234] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Destroyed the VM [ 772.387641] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 772.388448] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3564d20-cb4c-4502-a455-5d62450c82f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.392766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-be8fc4d6-20e4-41e6-895a-3e3341b1acfd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.399750] env[69475]: DEBUG nova.compute.manager [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Received event network-changed-eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.399978] env[69475]: DEBUG nova.compute.manager [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Refreshing instance network info cache due to event network-changed-eb7198c7-072e-4cfe-bfdb-5306e3098955. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 772.400326] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Acquiring lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.400454] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Acquired lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.401211] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Refreshing network info cache for port eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.414687] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2452a2aa-ab2f-450c-a656-1418377857ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.418933] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 772.418933] env[69475]: value = "task-3507996" [ 772.418933] env[69475]: _type = "Task" [ 772.418933] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.419512] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507991, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.443083] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507996, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.488553] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3507994, 'name': CreateVM_Task, 'duration_secs': 0.408961} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.491246] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.492226] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700903', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'name': 'volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bea34ef-0caf-4cdb-a689-dd747d9b52ea', 'attached_at': '', 'detached_at': '', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'serial': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d'}, 'device_type': None, 'attachment_id': 'de8e7333-e7d9-476f-981d-83b361f13ee8', 'mount_device': '/dev/sda', 'delete_on_termination': True, 'boot_index': 0, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69475) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 772.493169] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Root volume attach. Driver type: vmdk {{(pid=69475) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 772.493298] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-099bb71e-386c-4d59-9a8f-9ddb1ecbbbc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.501932] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84dafe1d-b0c9-40bc-9d66-4cb745396e71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.510798] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b317d7d-4b9b-4eaf-9f11-7c652c159b6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.522758] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-0962148a-2ba2-4fec-992c-76fcafe8b302 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.530320] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 772.530320] env[69475]: value = "task-3507997" [ 772.530320] env[69475]: _type = "Task" [ 772.530320] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.538839] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.658608] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507995, 'name': Rename_Task, 'duration_secs': 0.377324} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.659445] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 772.659445] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c5b32ac-b1ad-4633-80e1-431e14f86514 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.669395] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 772.669395] env[69475]: value = "task-3507998" [ 772.669395] env[69475]: _type = "Task" [ 772.669395] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.679199] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507998, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.899827] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550093} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.901320] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] a75d7a92-4ac7-4fa0-90f0-f0a0993e881e/a75d7a92-4ac7-4fa0-90f0-f0a0993e881e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.901551] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.902439] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424b7e6b-9212-4fd0-a882-abe6f5a68fcc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.905371] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-18e290f3-e573-477d-b7ce-1b03f99eca4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.918484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a8c605-21da-494a-90fa-6fba26a7fd7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.922446] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 772.922446] env[69475]: value = "task-3507999" [ 772.922446] env[69475]: _type = "Task" [ 772.922446] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.960464] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bc1e286-656d-440a-8885-baf3dee83334 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.971104] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507996, 'name': RemoveSnapshot_Task} progress is 50%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.971325] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507999, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.978679] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6989ad3-0917-41e8-b217-64409b8c621a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.995925] env[69475]: DEBUG nova.compute.provider_tree [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 773.042534] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 38%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.068684] env[69475]: DEBUG nova.network.neutron [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.098534] env[69475]: DEBUG nova.compute.manager [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Received event network-vif-plugged-c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.098717] env[69475]: DEBUG oslo_concurrency.lockutils [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] Acquiring lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.098931] env[69475]: DEBUG oslo_concurrency.lockutils [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 773.099108] env[69475]: DEBUG oslo_concurrency.lockutils [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 773.099396] env[69475]: DEBUG nova.compute.manager [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] No waiting events found dispatching 
network-vif-plugged-c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 773.099632] env[69475]: WARNING nova.compute.manager [req-65ebeaf7-212c-425a-a5a1-ae6457ff34e5 req-67cbe8ee-21c5-4e1f-a9d6-b8955be03e4d service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Received unexpected event network-vif-plugged-c3c6dc43-00e2-4fba-acf9-0f100d3cf239 for instance with vm_state building and task_state spawning. [ 773.166757] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updated VIF entry in instance network info cache for port eb7198c7-072e-4cfe-bfdb-5306e3098955. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.166757] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating instance_info_cache with network_info: [{"id": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "address": "fa:16:3e:0f:e3:ee", "network": {"id": "2dd39019-e771-409d-a18f-fe2ab0f534f9", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1698322861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76e3845a6d64757b175062c3e2c6198", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7198c7-07", "ovs_interfaceid": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.183260] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507998, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.356158] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Successfully updated port: c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 773.439262] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507999, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.442689] env[69475]: DEBUG oslo_vmware.api [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3507996, 'name': RemoveSnapshot_Task, 'duration_secs': 0.976018} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.442963] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 773.443200] env[69475]: INFO nova.compute.manager [None req-dddc4100-f20f-405a-8762-b0b7ce5cae54 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 16.90 seconds to snapshot the instance on the hypervisor. [ 773.537179] env[69475]: DEBUG nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 70 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 773.537434] env[69475]: DEBUG nova.compute.provider_tree [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 70 to 71 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 773.537628] env[69475]: DEBUG nova.compute.provider_tree [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 773.545432] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 53%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.568446] env[69475]: INFO nova.compute.manager [-] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Took 1.32 seconds to deallocate network for instance. [ 773.668149] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Releasing lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.668436] env[69475]: DEBUG nova.compute.manager [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Received event network-changed-4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.668657] env[69475]: DEBUG nova.compute.manager [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Refreshing instance network info cache due to event network-changed-4059da75-efc8-42ee-90b1-8202220d1621. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 773.668822] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.669039] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.669140] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Refreshing network info cache for port 4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.684806] env[69475]: DEBUG oslo_vmware.api [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3507998, 'name': PowerOnVM_Task, 'duration_secs': 0.927421} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.685120] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 773.685847] env[69475]: INFO nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Took 9.84 seconds to spawn the instance on the hypervisor. 
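The inventory-update failure earlier in this section (HTTP 409 with code placement.concurrent_update) and the later successful update that moved provider dd221100-68c1-4a75-92b5-b24d81fee5da from generation 70 to 71 show Placement's optimistic concurrency at work: every inventory PUT carries the provider generation it was computed against, and a generation mismatch forces the caller to refresh and retry. A rough sketch of that refresh-and-retry loop against the Placement REST API follows; the endpoint, token, and retry limit are illustrative assumptions, not Nova's scheduler report client.

    # --- illustrative sketch of generation-based retries against Placement ---
    # Endpoint and token are assumed placeholders, not taken from this deployment.
    import requests

    PLACEMENT = "http://placement.example:8778"
    HEADERS = {"X-Auth-Token": "REPLACE_WITH_TOKEN",
               "OpenStack-API-Version": "placement 1.26"}


    def put_inventory(provider_uuid, inventories, max_retries=4):
        for _ in range(max_retries):
            # Fetch the provider's current generation before writing.
            rp = requests.get(f"{PLACEMENT}/resource_providers/{provider_uuid}",
                              headers=HEADERS).json()
            body = {"resource_provider_generation": rp["generation"],
                    "inventories": inventories}
            resp = requests.put(
                f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories",
                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation first; refresh and try again, as in the log above.
        raise RuntimeError("gave up after repeated generation conflicts")


    inventory = {
        "VCPU": {"total": 48, "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512},
        "DISK_GB": {"total": 400, "max_unit": 88},
    }
    # put_inventory("dd221100-68c1-4a75-92b5-b24d81fee5da", inventory)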
[ 773.686090] env[69475]: DEBUG nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 773.687288] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eeda496-7e63-49ca-81f7-a7814fa78204 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.859978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.860167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.860333] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 773.935811] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3507999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.687798} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.936176] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.937038] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee8506cd-f23c-46c1-bae8-14797880848b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.970027] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Reconfiguring VM instance instance-0000002e to attach disk [datastore2] a75d7a92-4ac7-4fa0-90f0-f0a0993e881e/a75d7a92-4ac7-4fa0-90f0-f0a0993e881e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.974069] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46047d3b-1361-45ca-b080-4ca5943e5b9e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.994341] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 773.994341] env[69475]: value = "task-3508000" [ 773.994341] env[69475]: _type = "Task" [ 773.994341] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.007175] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508000, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.043449] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.726s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.045673] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 65%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.046199] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.534s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.046425] env[69475]: DEBUG nova.objects.instance [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lazy-loading 'resources' on Instance uuid 2dd98ffd-b0e6-4447-9c82-57713dc37abd {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.065654] env[69475]: INFO nova.scheduler.client.report [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Deleted allocations for instance 4c2e12bf-3f16-47de-a604-44b62a6c7137 [ 774.077694] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.208839] env[69475]: INFO nova.compute.manager [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Took 49.10 seconds to build instance. [ 774.405423] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 774.505689] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508000, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.551746] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 78%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.556412] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Updating instance_info_cache with network_info: [{"id": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "address": "fa:16:3e:4d:46:0d", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3c6dc43-00", "ovs_interfaceid": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.576572] env[69475]: DEBUG nova.compute.manager [req-1e277b76-ba7a-41ac-a403-f6b9108d994e req-7e248f44-a462-4fcc-a795-138d85316ce4 service nova] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Received event network-vif-deleted-3b284ec5-f19b-4688-9bab-a6fb120cc7d8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.577796] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updated VIF entry in instance network info cache for port 4059da75-efc8-42ee-90b1-8202220d1621. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.578222] env[69475]: DEBUG nova.network.neutron [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.582797] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0d74f2a-4af5-4391-8f04-faa3a8723b95 tempest-ImagesOneServerTestJSON-1208980985 tempest-ImagesOneServerTestJSON-1208980985-project-member] Lock "4c2e12bf-3f16-47de-a604-44b62a6c7137" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.964s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.712616] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5d1b98f-eec8-485a-980d-bdd75304d733 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.097s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 775.012344] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508000, 'name': ReconfigVM_Task, 'duration_secs': 0.884414} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.012344] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Reconfigured VM instance instance-0000002e to attach disk [datastore2] a75d7a92-4ac7-4fa0-90f0-f0a0993e881e/a75d7a92-4ac7-4fa0-90f0-f0a0993e881e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.012344] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2b374a8b-4b3a-4574-bcc0-e6a9219fdd82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.021980] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 775.021980] env[69475]: value = "task-3508001" [ 775.021980] env[69475]: _type = "Task" [ 775.021980] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.034529] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508001, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.044562] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 92%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.059360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.063022] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Instance network_info: |[{"id": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "address": "fa:16:3e:4d:46:0d", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3c6dc43-00", "ovs_interfaceid": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 775.063022] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:46:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c3c6dc43-00e2-4fba-acf9-0f100d3cf239', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 775.070375] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.076542] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 775.076973] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2ff03f0-31aa-4f40-8c7b-90962f004463 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.094474] env[69475]: DEBUG oslo_concurrency.lockutils [req-b02adf15-f4b7-4f18-8bd8-55dd8b49a53a req-3c6becde-974e-4b96-9cd6-5ff46ee33deb service nova] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.101202] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 775.101202] env[69475]: value = "task-3508002" [ 775.101202] env[69475]: _type = "Task" [ 775.101202] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.110332] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508002, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.143160] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4dded9-1c5b-4c60-84e9-acc236d190ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.150910] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09144e23-8fa2-43da-928c-a23ae441c06f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.183566] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ebd49ba-17e3-46ef-8ae1-ada83d33e125 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.191930] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c189c4-ed91-4a57-8b96-931dfdde514d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.206722] env[69475]: DEBUG nova.compute.provider_tree [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.216591] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 775.533475] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508001, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.543133] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 97%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.614196] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508002, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.711084] env[69475]: DEBUG nova.scheduler.client.report [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.741990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 775.825355] env[69475]: DEBUG nova.compute.manager [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Received event network-changed-c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.825355] env[69475]: DEBUG nova.compute.manager [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Refreshing instance network info cache due to event network-changed-c3c6dc43-00e2-4fba-acf9-0f100d3cf239. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 775.825355] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Acquiring lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.825355] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Acquired lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.825355] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Refreshing network info cache for port c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.035308] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508001, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.043763] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 97%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.110909] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508002, 'name': CreateVM_Task, 'duration_secs': 0.938184} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.111196] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 776.111972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.112203] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.112591] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 776.112897] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6d257e0-7778-476f-b055-d05b6a90d9a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.117388] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 776.117388] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c057d4-073e-78cd-cc30-016d7ff1f25e" [ 776.117388] env[69475]: _type = "Task" [ 776.117388] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.126390] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c057d4-073e-78cd-cc30-016d7ff1f25e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.218216] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.172s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.221225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.145s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.221519] env[69475]: DEBUG nova.objects.instance [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lazy-loading 'resources' on Instance uuid 8fbabf86-be9e-47ec-8c4c-adea4c68abe8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 776.240937] env[69475]: INFO nova.scheduler.client.report [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleted allocations for instance 2dd98ffd-b0e6-4447-9c82-57713dc37abd [ 776.348877] env[69475]: DEBUG nova.compute.manager [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.349890] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8255afda-7893-49b6-b6f0-5d3998b47a3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.391449] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "df73dd41-7455-4482-abb2-b61b26fcf403" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 776.391839] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.392138] env[69475]: INFO nova.compute.manager [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Rebooting instance [ 776.539026] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': 
task-3508001, 'name': Rename_Task, 'duration_secs': 1.163197} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.541043] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.541759] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-461a35a1-0206-48e4-9d84-2e07f46aa3ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.550818] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task} progress is 98%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.552114] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 776.552114] env[69475]: value = "task-3508003" [ 776.552114] env[69475]: _type = "Task" [ 776.552114] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.559688] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508003, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.629027] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c057d4-073e-78cd-cc30-016d7ff1f25e, 'name': SearchDatastore_Task, 'duration_secs': 0.011302} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.629027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.629027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 776.629027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.629027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.629027] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 776.629027] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81620b25-b67e-4f01-a632-955d91ea0a80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.645150] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 776.645150] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 776.645150] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d1a18d8-41e7-4035-b651-a6a377123979 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.649553] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 776.649553] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d1072-1c80-41ef-23c2-fcec545c0914" [ 776.649553] env[69475]: _type = "Task" [ 776.649553] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.657984] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d1072-1c80-41ef-23c2-fcec545c0914, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.752627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e35c031-eca1-4d7c-ae1c-0bc34e0c0f0d tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2dd98ffd-b0e6-4447-9c82-57713dc37abd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.746s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.861374] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Updated VIF entry in instance network info cache for port c3c6dc43-00e2-4fba-acf9-0f100d3cf239. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 776.861758] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Updating instance_info_cache with network_info: [{"id": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "address": "fa:16:3e:4d:46:0d", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc3c6dc43-00", "ovs_interfaceid": "c3c6dc43-00e2-4fba-acf9-0f100d3cf239", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.864772] env[69475]: INFO nova.compute.manager [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] instance snapshotting [ 776.873054] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7701f98-f783-4855-9364-ff2c87df3bca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.902267] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ec4d3a-2bbf-481c-8c54-5e9c9c5b1200 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.929913] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.929913] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquired lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.929913] env[69475]: DEBUG nova.network.neutron [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} 
[ 777.052380] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3507997, 'name': RelocateVM_Task, 'duration_secs': 4.076002} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.055230] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Volume attach. Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 777.055418] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700903', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'name': 'volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bea34ef-0caf-4cdb-a689-dd747d9b52ea', 'attached_at': '', 'detached_at': '', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'serial': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 777.060768] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9f537b-63f1-4005-8869-3770527b504f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.067998] env[69475]: DEBUG oslo_vmware.api [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508003, 'name': PowerOnVM_Task, 'duration_secs': 0.483685} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.082784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.082784] env[69475]: INFO nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Took 10.32 seconds to spawn the instance on the hypervisor. 
[ 777.082784] env[69475]: DEBUG nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.082784] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf5972c-3c26-493c-93f5-0b44b2e3957b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.088305] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c08bb0-f45f-4a51-9dd5-d5e56fd809d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.115203] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d/volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 777.118735] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1ef5b723-3ef8-4e64-bce7-3631ae516046 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.139217] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 777.139217] env[69475]: value = "task-3508004" [ 777.139217] env[69475]: _type = "Task" [ 777.139217] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.148410] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508004, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.161251] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521d1072-1c80-41ef-23c2-fcec545c0914, 'name': SearchDatastore_Task, 'duration_secs': 0.025867} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.162803] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3745e46c-b093-45f5-8e62-80e74d7c2bb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.168827] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 777.168827] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52199322-75b4-601b-aa0b-f62fcd35a65c" [ 777.168827] env[69475]: _type = "Task" [ 777.168827] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.180629] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52199322-75b4-601b-aa0b-f62fcd35a65c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.365087] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Releasing lock "refresh_cache-3fba85c9-7798-4a66-b335-21f80962e0bd" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.365431] env[69475]: DEBUG nova.compute.manager [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 777.365642] env[69475]: DEBUG nova.compute.manager [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing instance network info cache due to event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 777.365883] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Acquiring lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 777.368722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c974f096-c364-445f-bf9d-0e00f29fccb5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.376822] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581fb122-c3d1-48ae-a30e-9e43b239e0cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.407082] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a075c8-0752-4d3b-9369-82601836d8b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.414765] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 777.414928] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-601f4241-ee45-46ab-92e0-37b6c4b51a58 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.419747] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a8f92ab-7723-4b7a-9516-38cc5e5dedfe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.437362] env[69475]: DEBUG nova.compute.provider_tree [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 777.440044] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 777.440044] env[69475]: value = "task-3508005" [ 777.440044] env[69475]: _type = "Task" [ 777.440044] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.448958] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508005, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.630135] env[69475]: INFO nova.compute.manager [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Took 50.81 seconds to build instance. [ 777.653903] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508004, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.680013] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52199322-75b4-601b-aa0b-f62fcd35a65c, 'name': SearchDatastore_Task, 'duration_secs': 0.016412} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.680328] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.680652] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3fba85c9-7798-4a66-b335-21f80962e0bd/3fba85c9-7798-4a66-b335-21f80962e0bd.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 777.681056] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9528536a-8dd3-4f5f-a837-3949e8e7a2a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.688294] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 777.688294] env[69475]: value = "task-3508006" [ 777.688294] env[69475]: _type = "Task" [ 777.688294] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 777.698635] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508006, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.755872] env[69475]: DEBUG nova.network.neutron [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.954159] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508005, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 777.982696] env[69475]: DEBUG nova.scheduler.client.report [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 71 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 777.982904] env[69475]: DEBUG nova.compute.provider_tree [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 71 to 72 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 777.982904] env[69475]: DEBUG nova.compute.provider_tree [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 778.134443] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9f6bb65-b21f-4399-a0a2-0aac01ab91e1 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 104.362s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.157363] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508004, 'name': ReconfigVM_Task, 'duration_secs': 0.577969} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.157746] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Reconfigured VM instance instance-0000002f to attach disk [datastore1] volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d/volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 778.166455] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4799500b-99dc-4e16-a675-90e561f0604d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.186681] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 778.186681] env[69475]: value = "task-3508007" [ 778.186681] env[69475]: _type = "Task" [ 778.186681] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.201815] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508007, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.210629] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508006, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.258121] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Releasing lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.259478] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Acquired lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.259696] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.455145] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508005, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.490350] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.269s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.493048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.320s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.494636] env[69475]: INFO nova.compute.claims [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.527904] env[69475]: INFO nova.scheduler.client.report [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Deleted allocations for instance 8fbabf86-be9e-47ec-8c4c-adea4c68abe8 [ 778.640164] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.701391] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508007, 'name': ReconfigVM_Task, 'duration_secs': 0.163934} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.705024] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700903', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'name': 'volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bea34ef-0caf-4cdb-a689-dd747d9b52ea', 'attached_at': '', 'detached_at': '', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'serial': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 778.705530] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508006, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654106} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.705737] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e4ce9fc0-9ee4-480a-9bc9-041b88aa4561 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.707308] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 3fba85c9-7798-4a66-b335-21f80962e0bd/3fba85c9-7798-4a66-b335-21f80962e0bd.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 778.707518] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 778.708014] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e402ea60-1054-4a43-8339-54529286ae26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.714266] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 778.714266] env[69475]: value = "task-3508008" [ 778.714266] env[69475]: _type = "Task" [ 778.714266] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.718615] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 778.718615] env[69475]: value = "task-3508009" [ 778.718615] env[69475]: _type = "Task" [ 778.718615] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 778.724846] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508008, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.730103] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508009, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 778.768481] env[69475]: DEBUG nova.compute.manager [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 778.771019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa2e5fff-0914-41b4-8fa5-d263a0cd4597 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 778.956875] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508005, 'name': CreateSnapshot_Task, 'duration_secs': 1.222316} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 778.957175] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 778.957961] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a300892-cd8d-4ac3-859c-c372304c281d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.042082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7967c9fd-ddc6-4b3e-b676-4cc3096c3842 tempest-ServersTestJSON-176631825 tempest-ServersTestJSON-176631825-project-member] Lock "8fbabf86-be9e-47ec-8c4c-adea4c68abe8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.440s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.083042] env[69475]: DEBUG nova.compute.manager [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 779.086505] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d372b29c-ab04-4353-8b31-1607d70b2b52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.165602] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.225827] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508008, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067179} 
completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.226586] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 779.227361] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27410c8-409b-47b8-9e7d-54c094677cd1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.235323] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508009, 'name': Rename_Task, 'duration_secs': 0.280133} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 779.236236] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 779.236500] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85044caf-d918-4775-b00f-9f85b7e39621 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.258505] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] 3fba85c9-7798-4a66-b335-21f80962e0bd/3fba85c9-7798-4a66-b335-21f80962e0bd.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 779.259404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6ad619d-f2c3-4d7b-b303-528d4c2b5dcf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.276521] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 779.276521] env[69475]: value = "task-3508010" [ 779.276521] env[69475]: _type = "Task" [ 779.276521] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.288109] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 779.288109] env[69475]: value = "task-3508011" [ 779.288109] env[69475]: _type = "Task" [ 779.288109] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.297358] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508010, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.303479] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508011, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.345111] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updated VIF entry in instance network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.345481] env[69475]: DEBUG nova.network.neutron [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.478643] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 779.478997] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bcc0414b-0f15-4408-bcb2-2e24f7991d83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.488245] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 
tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 779.488245] env[69475]: value = "task-3508012" [ 779.488245] env[69475]: _type = "Task" [ 779.488245] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.497585] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508012, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.597923] env[69475]: INFO nova.compute.manager [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] instance snapshotting [ 779.602102] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea28b2d0-2991-4575-97e5-62afef96da07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.631139] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe493cd3-5005-4670-a786-2c104a3369fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.795775] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfc2783-586a-4508-a0ca-29c9321b031c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.798789] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508010, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.812834] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508011, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.813141] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Doing hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 779.813402] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-c9dfc8bc-5f9e-4807-a908-742ebb105ddf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.820341] env[69475]: DEBUG oslo_vmware.api [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 779.820341] env[69475]: value = "task-3508013" [ 779.820341] env[69475]: _type = "Task" [ 779.820341] env[69475]: } to complete. 
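The instance_info_cache entry logged above for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 is a plain list of VIF dicts. A minimal sketch of pulling the fixed addresses out of such a structure, using an abridged copy of that entry (the helper name fixed_ips is illustrative, not Nova code):

    network_info = [{
        "id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2",
        "address": "fa:16:3e:89:09:25",
        "network": {
            "label": "tempest-SecurityGroupsTestJSON-673445520-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.11", "type": "fixed",
                         "floating_ips": []}],
            }],
        },
        "type": "ovs",
        "devname": "tap7b155c9d-5e",
        "active": True,
    }]

    def fixed_ips(nw_info):
        # Collect (port id, fixed address) pairs across all VIFs and subnets.
        pairs = []
        for vif in nw_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip.get("type") == "fixed":
                        pairs.append((vif["id"], ip["address"]))
        return pairs

    print(fixed_ips(network_info))
    # [('7b155c9d-5e5c-499f-bfd8-a2c59e674bc2', '192.168.128.11')]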
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 779.830414] env[69475]: DEBUG oslo_vmware.api [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508013, 'name': ResetVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 779.848525] env[69475]: DEBUG oslo_concurrency.lockutils [req-3bd05402-20dc-4c22-b237-7e985411cfcc req-b756c51f-2d8b-4954-8eae-3381bcb49d32 service nova] Releasing lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.999210] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508012, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.036680] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2b18cd-c35e-479d-bbf5-99e3ec3356f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.046282] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e65a7771-8681-461a-9c6b-b0a8b3dd11b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.090070] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-097e5622-fbd9-4cbd-8af3-6bcc4a3c420a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.098447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910dff83-eb4b-4735-8410-5177d1532852 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.112887] env[69475]: DEBUG nova.compute.provider_tree [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.142234] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 780.142560] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ede92e88-0c83-4e5d-a28e-388f18ccdf13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.150358] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 780.150358] 
env[69475]: value = "task-3508014" [ 780.150358] env[69475]: _type = "Task" [ 780.150358] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.158438] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508014, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.292412] env[69475]: DEBUG oslo_vmware.api [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508010, 'name': PowerOnVM_Task, 'duration_secs': 0.917611} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.292412] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 780.292655] env[69475]: INFO nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Took 8.86 seconds to spawn the instance on the hypervisor. [ 780.292747] env[69475]: DEBUG nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.293548] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92750149-04f5-4af3-8575-1c3a96d1288c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.310403] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508011, 'name': ReconfigVM_Task, 'duration_secs': 0.659631} completed successfully. 
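The wait_for_task / "progress is N%" / "completed successfully" entries above all come from the same polling loop around vCenter tasks. A hedged, self-contained sketch of that pattern (fetch_task_state and its return shape are assumptions for illustration, not the oslo.vmware internals):

    import time

    def wait_for_task(fetch_task_state, task_id, poll_interval=0.5):
        # fetch_task_state(task_id) is assumed to return a dict like
        # {'name': 'PowerOnVM_Task', 'state': 'running', 'progress': 66}
        # with state in {'queued', 'running', 'success', 'error'}.
        while True:
            info = fetch_task_state(task_id)
            if info["state"] == "success":
                return info  # corresponds to "completed successfully"
            if info["state"] == "error":
                raise RuntimeError("Task %s failed: %s" % (task_id, info.get("error")))
            # corresponds to the periodic "progress is N%" entries
            print("Task %s (%s) progress is %s%%"
                  % (task_id, info["name"], info.get("progress", 0)))
            time.sleep(poll_interval)

    # usage with a canned sequence of states, e.g. for task-3508011
    states = iter([{"name": "ReconfigVM_Task", "state": "running", "progress": 14},
                   {"name": "ReconfigVM_Task", "state": "running", "progress": 99},
                   {"name": "ReconfigVM_Task", "state": "success"}])
    wait_for_task(lambda _tid: next(states), "task-3508011", poll_interval=0)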
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.310956] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Reconfigured VM instance instance-00000030 to attach disk [datastore2] 3fba85c9-7798-4a66-b335-21f80962e0bd/3fba85c9-7798-4a66-b335-21f80962e0bd.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 780.311789] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-753e42c1-6481-42d7-a1c7-82ad0abd4c82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.320452] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 780.320452] env[69475]: value = "task-3508015" [ 780.320452] env[69475]: _type = "Task" [ 780.320452] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.333981] env[69475]: DEBUG oslo_vmware.api [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508013, 'name': ResetVM_Task, 'duration_secs': 0.105852} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.340493] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Did hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 780.341177] env[69475]: DEBUG nova.compute.manager [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 780.341622] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508015, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.343026] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ab4a8dc-e106-4706-bebe-aebe85edd3ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.506025] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508012, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.616718] env[69475]: DEBUG nova.scheduler.client.report [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.661392] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508014, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.821169] env[69475]: INFO nova.compute.manager [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Took 42.87 seconds to build instance. [ 780.831732] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508015, 'name': Rename_Task, 'duration_secs': 0.210209} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 780.832021] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 780.832276] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21532d56-ef29-4ba4-97d4-8ac238888c1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.839345] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 780.839345] env[69475]: value = "task-3508016" [ 780.839345] env[69475]: _type = "Task" [ 780.839345] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 780.850840] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508016, 'name': PowerOnVM_Task} progress is 0%. 
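The report-client entries above keep repeating the same inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da. A small sketch of what that inventory means for schedulable capacity, assuming Placement's usual rule of (total - reserved) * allocation_ratio (illustrative arithmetic only):

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    def capacity(inv):
        # Effective capacity per resource class under the assumed rule.
        return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}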
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 780.858396] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77da5ee8-0e01-463f-9a67-080356e05ff4 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.467s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 780.871150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 780.871381] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.002212] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508012, 'name': CloneVM_Task, 'duration_secs': 1.201808} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.002585] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Created linked-clone VM from snapshot [ 781.003573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44b63a1-87cc-4e36-baa9-d30660889c78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.012747] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Uploading image 091c65f1-c5d2-4582-a0e4-5024ab969207 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 781.040907] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 781.040907] env[69475]: value = "vm-700971" [ 781.040907] env[69475]: _type = "VirtualMachine" [ 781.040907] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 781.041213] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-29cf0268-39bc-4bfa-93d5-8e66a945544a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.049116] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease: (returnval){ [ 781.049116] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f5c8d-e465-ecc4-66a9-929adbcce6d3" [ 781.049116] env[69475]: _type = "HttpNfcLease" [ 781.049116] env[69475]: } obtained for exporting VM: (result){ [ 781.049116] env[69475]: value = "vm-700971" [ 781.049116] env[69475]: _type = "VirtualMachine" [ 781.049116] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 781.049533] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the lease: (returnval){ [ 781.049533] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f5c8d-e465-ecc4-66a9-929adbcce6d3" [ 781.049533] env[69475]: _type = "HttpNfcLease" [ 781.049533] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 781.055820] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 781.055820] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f5c8d-e465-ecc4-66a9-929adbcce6d3" [ 781.055820] env[69475]: _type = "HttpNfcLease" [ 781.055820] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 781.123791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.125307] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 781.130193] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.098s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.131733] env[69475]: INFO nova.compute.claims [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.160745] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508014, 'name': CreateSnapshot_Task, 'duration_secs': 0.893672} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.161350] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 781.162283] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c06eb679-1d27-4c8c-ad8c-cf682c34b70c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.327222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e3e1ee0-9537-4e86-8b93-1fbe340c3edd tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 101.124s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.351757] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508016, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.561559] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 781.561559] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f5c8d-e465-ecc4-66a9-929adbcce6d3" [ 781.561559] env[69475]: _type = "HttpNfcLease" [ 781.561559] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 781.561559] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 781.561559] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524f5c8d-e465-ecc4-66a9-929adbcce6d3" [ 781.561559] env[69475]: _type = "HttpNfcLease" [ 781.561559] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 781.561559] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412ac10e-e8ab-44da-88b2-2227db0bab31 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.568614] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 781.568798] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 781.633719] env[69475]: DEBUG nova.compute.utils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.635127] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Allocating IP information in the background. 
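The rw_handles entries above show the export path for the linked-clone image: create an HttpNfcLease, wait for it to become ready, then read the disk's VMDK URL out of the lease info. A hedged sketch over a plain-dict stand-in for that lease info (field names mirror the vSphere HttpNfcLeaseInfo shape; the dict and helper are illustrative, not actual API objects):

    lease_info = {
        "state": "ready",
        "deviceUrl": [
            {"disk": True,
             "url": "https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/"
                    "5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk"},
        ],
    }

    def find_vmdk_url(info):
        # Return the first disk device URL once the lease is ready.
        if info.get("state") != "ready":
            raise RuntimeError("lease is not ready")
        for dev in info.get("deviceUrl", []):
            if dev.get("disk"):
                return dev["url"]
        return None

    print(find_vmdk_url(lease_info))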
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.635297] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.692292] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 781.693409] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b3cffb89-88b9-4d7e-aeb1-99e9ee5f285d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.697094] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-861992f1-5172-4c47-b5f7-42de2bb5d33e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.705023] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 781.705023] env[69475]: value = "task-3508018" [ 781.705023] env[69475]: _type = "Task" [ 781.705023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 781.717709] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508018, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 781.757323] env[69475]: DEBUG nova.policy [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35701016696a4f57a1c34462e46e99d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '02d595a3575a40799470947426047e69', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.830925] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 781.854416] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508016, 'name': PowerOnVM_Task, 'duration_secs': 0.725963} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 781.854416] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 781.854780] env[69475]: INFO nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Took 9.51 seconds to spawn the instance on the hypervisor. [ 781.855303] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 781.856171] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f118f6c2-d048-4092-8486-1af0a6929577 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.138903] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 782.219943] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508018, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.380920] env[69475]: INFO nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Took 42.12 seconds to build instance. 
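The lockutils entries throughout this trace follow a fixed three-step pattern: "Acquiring lock", "acquired ... :: waited N s", and "released ... :: held N s". A self-contained sketch of that accounting (a stand-in, not the oslo.concurrency implementation):

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}
    _registry_guard = threading.Lock()

    @contextmanager
    def traced_lock(name, owner):
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        print('Acquiring lock "%s" by "%s"' % (name, owner))
        start = time.monotonic()
        lock.acquire()
        acquired = time.monotonic()
        print('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, acquired - start))
        try:
            yield
        finally:
            lock.release()
            print('Lock "%s" "released" by "%s" :: held %.3fs'
                  % (name, owner, time.monotonic() - acquired))

    # usage, mirroring the resource-tracker claims seen above
    with traced_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)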
[ 782.389970] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.423344] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Successfully created port: 1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.726584] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508018, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 782.756205] env[69475]: DEBUG nova.compute.manager [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Received event network-changed-eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 782.756702] env[69475]: DEBUG nova.compute.manager [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Refreshing instance network info cache due to event network-changed-eb7198c7-072e-4cfe-bfdb-5306e3098955. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 782.756805] env[69475]: DEBUG oslo_concurrency.lockutils [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] Acquiring lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.757092] env[69475]: DEBUG oslo_concurrency.lockutils [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] Acquired lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 782.758064] env[69475]: DEBUG nova.network.neutron [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Refreshing network info cache for port eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 782.858470] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "619a87e7-097c-41af-8452-5437b82e7ebe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 782.859265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 782.863455] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9fb29e-ef2e-46e5-aeee-bb7763a47aba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.871734] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c850de-7a69-4b6d-8b8c-4983c3318996 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.905679] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.658s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 782.908555] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-456e6721-2f7e-4f9f-b0bc-140ff685919d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.917087] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd40fd4-56d1-4ed5-8d26-1f092143445b {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.936138] env[69475]: DEBUG nova.compute.provider_tree [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.159642] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 783.197034] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 783.197329] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 783.197496] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 783.197686] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 783.199572] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 783.200610] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 783.200610] env[69475]: DEBUG 
nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 783.200610] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 783.200610] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 783.200610] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 783.200610] env[69475]: DEBUG nova.virt.hardware [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 783.201597] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72537a0b-30e8-4628-a86f-7fdd3f96167b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.218127] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a452602-6be2-44d3-b3db-0d525ace77ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.952346] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Starting instance... 
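The nova.virt.hardware entries above walk from the flavor/image limits down to a single candidate topology for the 1-vCPU m1.nano flavor. A simplified sketch of that enumeration (illustrative, not Nova's exact algorithm): list the (sockets, cores, threads) factorizations of the vCPU count that fit under the given maxima.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) triple whose product is exactly the
        # vCPU count and which stays within the per-dimension maxima.
        topologies = []
        for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
            if sockets * cores * threads != vcpus:
                continue
            if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
                topologies.append((sockets, cores, threads))
        return topologies

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology logged above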
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 783.955575] env[69475]: DEBUG nova.scheduler.client.report [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.978910] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508018, 'name': CloneVM_Task, 'duration_secs': 1.501307} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.978910] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Created linked-clone VM from snapshot [ 783.980261] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf8d25df-1082-47f5-b7ea-14f107566eab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.989724] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Uploading image 31680ff5-995d-4d87-b652-46bc9e35ba3c {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 784.006438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 784.006736] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-61506ab7-95a5-4009-ad3f-236e3a80e422 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.016885] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 784.016885] env[69475]: value = "task-3508019" [ 784.016885] env[69475]: _type = "Task" [ 784.016885] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.026194] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508019, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.208118] env[69475]: DEBUG nova.network.neutron [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updated VIF entry in instance network info cache for port eb7198c7-072e-4cfe-bfdb-5306e3098955. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 784.208615] env[69475]: DEBUG nova.network.neutron [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating instance_info_cache with network_info: [{"id": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "address": "fa:16:3e:0f:e3:ee", "network": {"id": "2dd39019-e771-409d-a18f-fe2ab0f534f9", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1698322861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.221", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c76e3845a6d64757b175062c3e2c6198", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "74e6f6e0-95e6-4531-99e9-0e78350fb655", "external-id": "nsx-vlan-transportzone-896", "segmentation_id": 896, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb7198c7-07", "ovs_interfaceid": "eb7198c7-072e-4cfe-bfdb-5306e3098955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.467538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.337s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.468046] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 784.472858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "df73dd41-7455-4482-abb2-b61b26fcf403" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.473553] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.473792] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.473993] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.474187] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.475998] env[69475]: INFO nova.compute.manager [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Terminating instance [ 784.477294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 24.168s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 784.493490] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Successfully updated port: 1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 784.495576] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 784.531593] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508019, 'name': Destroy_Task, 'duration_secs': 0.40417} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 784.531920] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Destroyed the VM [ 784.532241] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 784.532546] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ed24c7d8-cd08-417e-bf38-5e0dc1a652fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.541186] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 784.541186] env[69475]: value = "task-3508020" [ 784.541186] env[69475]: _type = "Task" [ 784.541186] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 784.550597] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508020, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 784.712303] env[69475]: DEBUG oslo_concurrency.lockutils [req-a78eabe1-a662-468f-b905-511be8bf391e req-e120b832-fb3a-4edd-807a-2309fd22a80c service nova] Releasing lock "refresh_cache-8bea34ef-0caf-4cdb-a689-dd747d9b52ea" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.847771] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 784.847965] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing instance network info cache due to event network-changed-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 784.848220] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Acquiring lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.848353] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Acquired lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.848526] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Refreshing network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 784.980915] env[69475]: DEBUG nova.compute.utils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 784.982447] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.982616] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.984787] env[69475]: DEBUG nova.compute.manager [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 784.984980] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 784.987752] env[69475]: INFO nova.compute.claims [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 784.992537] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d3ee9e-73e3-41aa-9bfb-8afd2e72b2b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 784.998070] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.998208] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.998352] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 785.001914] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.002219] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-136ed02d-8658-409f-b111-d8833d5d7d87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.008617] env[69475]: DEBUG 
oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 785.008617] env[69475]: value = "task-3508021" [ 785.008617] env[69475]: _type = "Task" [ 785.008617] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.018339] env[69475]: DEBUG oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508021, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.040241] env[69475]: DEBUG nova.policy [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '26c5fd8183bd4d4999f7e2305faee3b8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '385b952569624908badf6708cae97a51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 785.053394] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508020, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.491361] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Successfully created port: cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.493034] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 785.496455] env[69475]: INFO nova.compute.resource_tracker [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating resource usage from migration 7cecf250-4d16-45a0-ba21-6bbaf5ce2c0e [ 785.519595] env[69475]: DEBUG oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508021, 'name': PowerOffVM_Task, 'duration_secs': 0.40051} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.519595] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 785.519595] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 785.519595] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7da46f9a-32d6-4408-a73b-b01db3387d91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.553749] env[69475]: DEBUG oslo_vmware.api [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508020, 'name': RemoveSnapshot_Task, 'duration_secs': 0.950602} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 785.553961] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 785.573604] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.594544] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 785.594779] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 785.594959] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleting the datastore file [datastore2] df73dd41-7455-4482-abb2-b61b26fcf403 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 785.598292] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-290aeca5-c9b0-48e2-aba2-df5ec228dcb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.609822] env[69475]: DEBUG oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 785.609822] env[69475]: value = "task-3508023" [ 785.609822] env[69475]: _type = "Task" [ 785.609822] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.620148] env[69475]: DEBUG oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508023, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.623502] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updated VIF entry in instance network info cache for port 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 785.623896] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [{"id": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "address": "fa:16:3e:89:09:25", "network": {"id": "1ccefa75-7f28-427a-a2dc-65225b56bc7d", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-673445520-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "47bcbe5bc3a14fbf9ea9617ea7d50342", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b155c9d-5e", "ovs_interfaceid": "7b155c9d-5e5c-499f-bfd8-a2c59e674bc2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.930142] env[69475]: DEBUG nova.network.neutron [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Updating instance_info_cache with network_info: [{"id": "1e66a927-4da7-44a7-8abe-812876507f48", "address": "fa:16:3e:73:2c:78", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e66a927-4d", "ovs_interfaceid": "1e66a927-4da7-44a7-8abe-812876507f48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.057096] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcee15b-66af-4596-8e02-6c98673925e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.061203] env[69475]: WARNING nova.compute.manager [None req-d23dcc63-cb13-4c98-bac7-585a7b0d8708 
tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Image not found during snapshot: nova.exception.ImageNotFound: Image 31680ff5-995d-4d87-b652-46bc9e35ba3c could not be found. [ 786.067748] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6267d2-daaa-4d7f-8282-f9d973266eb7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.099384] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acec329-6c22-41a0-8bbe-674aaf8ae8ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.108272] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dafce66-4a1f-41d0-b273-f470184f2612 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.125487] env[69475]: DEBUG nova.compute.provider_tree [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.130047] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Releasing lock "refresh_cache-df73dd41-7455-4482-abb2-b61b26fcf403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.130309] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Received event network-vif-plugged-1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.130508] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Acquiring lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.130709] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.130867] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.131041] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 
420ecc09-60c8-4a14-8504-d11d760ddbb4] No waiting events found dispatching network-vif-plugged-1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 786.131207] env[69475]: WARNING nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Received unexpected event network-vif-plugged-1e66a927-4da7-44a7-8abe-812876507f48 for instance with vm_state building and task_state spawning. [ 786.131366] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Received event network-changed-1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.131516] env[69475]: DEBUG nova.compute.manager [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Refreshing instance network info cache due to event network-changed-1e66a927-4da7-44a7-8abe-812876507f48. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 786.131715] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Acquiring lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.131932] env[69475]: DEBUG oslo_vmware.api [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508023, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476012} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.132398] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 786.132576] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 786.132762] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 786.132926] env[69475]: INFO nova.compute.manager [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 786.133167] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.133353] env[69475]: DEBUG nova.compute.manager [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 786.133442] env[69475]: DEBUG nova.network.neutron [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 786.434584] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 786.435282] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Instance network_info: |[{"id": "1e66a927-4da7-44a7-8abe-812876507f48", "address": "fa:16:3e:73:2c:78", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e66a927-4d", "ovs_interfaceid": "1e66a927-4da7-44a7-8abe-812876507f48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 786.435658] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Acquired lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.435995] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Refreshing network info cache for port 1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 786.438554] env[69475]: DEBUG 
nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:2c:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f92f0b92-d6fb-4d00-8ad5-6b3809ed5493', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1e66a927-4da7-44a7-8abe-812876507f48', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 786.447872] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 786.448363] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 786.448588] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fd39f0a-93f5-4ad2-b140-554440ef6490 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.469366] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 786.469366] env[69475]: value = "task-3508024" [ 786.469366] env[69475]: _type = "Task" [ 786.469366] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.480722] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508024, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.510906] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 786.544292] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.544679] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 786.544767] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 786.544883] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 786.545035] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 786.545185] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 786.545393] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 786.545563] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 786.545736] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 786.545908] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 786.546083] env[69475]: DEBUG nova.virt.hardware [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 786.546967] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad61cd1-d203-46f1-9736-e42b82c680ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.555367] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d7dbe5b-8301-40a5-a77f-d8242b897cbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.634453] env[69475]: DEBUG nova.scheduler.client.report [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 786.753726] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.754144] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.754381] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 786.754566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 786.754733] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 786.757732] env[69475]: INFO nova.compute.manager [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Terminating instance [ 786.874858] env[69475]: DEBUG nova.compute.manager [req-c4279c8e-ec51-4576-8fd6-a4b02b69bd5b req-3927283f-7342-495a-9d62-2a58be536fb3 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Received event network-vif-deleted-7b155c9d-5e5c-499f-bfd8-a2c59e674bc2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 786.875011] env[69475]: INFO nova.compute.manager [req-c4279c8e-ec51-4576-8fd6-a4b02b69bd5b req-3927283f-7342-495a-9d62-2a58be536fb3 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Neutron deleted interface 7b155c9d-5e5c-499f-bfd8-a2c59e674bc2; detaching it from the instance and deleting it from the info cache [ 786.875222] env[69475]: DEBUG nova.network.neutron [req-c4279c8e-ec51-4576-8fd6-a4b02b69bd5b req-3927283f-7342-495a-9d62-2a58be536fb3 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.978048] env[69475]: DEBUG nova.network.neutron [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 786.982623] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508024, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.140456] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.663s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.140785] env[69475]: INFO nova.compute.manager [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Migrating [ 787.141442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.141442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "compute-rpcapi-router" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.142370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.648s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.150278] env[69475]: INFO nova.compute.claims [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.265859] env[69475]: DEBUG nova.compute.manager [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 787.266127] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 787.267107] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac5c330-5fea-465e-ad94-bb2cfb48bf3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.275531] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 787.275824] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5d831b7-b4fa-4196-9c87-a9caaf72b88c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.282437] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 787.282437] env[69475]: value = "task-3508025" [ 787.282437] env[69475]: _type = "Task" [ 787.282437] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.290729] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508025, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.360927] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Updated VIF entry in instance network info cache for port 1e66a927-4da7-44a7-8abe-812876507f48. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 787.361335] env[69475]: DEBUG nova.network.neutron [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Updating instance_info_cache with network_info: [{"id": "1e66a927-4da7-44a7-8abe-812876507f48", "address": "fa:16:3e:73:2c:78", "network": {"id": "c5897d3f-e695-4647-81ec-1226e3bd3b3c", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-1437300532-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "02d595a3575a40799470947426047e69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f92f0b92-d6fb-4d00-8ad5-6b3809ed5493", "external-id": "nsx-vlan-transportzone-48", "segmentation_id": 48, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1e66a927-4d", "ovs_interfaceid": "1e66a927-4da7-44a7-8abe-812876507f48", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.378257] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7509acc8-7848-4869-be86-f63e56d75608 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.390084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a176a3e2-54d2-4552-8cfe-1b2e9a679c30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.429807] env[69475]: DEBUG nova.compute.manager [req-c4279c8e-ec51-4576-8fd6-a4b02b69bd5b req-3927283f-7342-495a-9d62-2a58be536fb3 service nova] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Detach interface failed, port_id=7b155c9d-5e5c-499f-bfd8-a2c59e674bc2, reason: Instance df73dd41-7455-4482-abb2-b61b26fcf403 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 787.481219] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508024, 'name': CreateVM_Task, 'duration_secs': 0.698889} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.481440] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 787.482202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.482365] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.482719] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 787.482980] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73163238-0795-4c3f-9107-33a9e9be7993 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.484965] env[69475]: INFO nova.compute.manager [-] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Took 1.35 seconds to deallocate network for instance. [ 787.492154] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 787.492154] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52719c43-73b6-596e-313d-faaece8dcaa3" [ 787.492154] env[69475]: _type = "Task" [ 787.492154] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.510115] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52719c43-73b6-596e-313d-faaece8dcaa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.599304] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Successfully updated port: cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.652311] env[69475]: INFO nova.compute.rpcapi [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 787.653065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "compute-rpcapi-router" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.794880] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508025, 'name': PowerOffVM_Task, 'duration_secs': 0.276602} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.795244] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 787.795424] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 787.795726] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d5cac225-7298-47e6-90b0-643d5b826132 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.864796] env[69475]: DEBUG oslo_concurrency.lockutils [req-05b03daf-de39-40ad-a1a6-9676c1f3fd8f req-d04d79f1-1109-4fa5-99a8-1fd3fb8028c2 service nova] Releasing lock "refresh_cache-420ecc09-60c8-4a14-8504-d11d760ddbb4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.883831] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 787.884119] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Deleting contents of the VM from 
datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 787.884316] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleting the datastore file [datastore2] a75d7a92-4ac7-4fa0-90f0-f0a0993e881e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 787.884585] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2284693f-a5a0-4329-912d-622b283cc918 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.891529] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 787.891529] env[69475]: value = "task-3508027" [ 787.891529] env[69475]: _type = "Task" [ 787.891529] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.899602] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508027, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.997146] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.011671] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52719c43-73b6-596e-313d-faaece8dcaa3, 'name': SearchDatastore_Task, 'duration_secs': 0.018555} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.012293] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.012609] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 788.012924] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.013103] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.013298] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 788.013562] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-809ab0f9-3028-4b93-a567-e25570c8a0b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.033071] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 788.033336] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 788.034054] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9884e69-6d7c-4aae-9b70-9982a2ebad51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.039297] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 788.039297] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52296e1b-8a5d-3df7-92fd-90eb0df2d999" [ 788.039297] env[69475]: _type = "Task" [ 788.039297] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.050194] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52296e1b-8a5d-3df7-92fd-90eb0df2d999, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.101500] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.101710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquired lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.101870] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.148570] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a6a86e-f80c-427f-954a-45e90370d81f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.158022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2245c02e-4b8d-490d-96b5-b9e20a3b26f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.192453] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.192671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 
tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 788.192813] env[69475]: DEBUG nova.network.neutron [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 788.196063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecf5d191-8bc6-4f3e-9927-f03789acd5ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.204712] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f9d1320-9c7f-46ac-a5ad-4da8bd767c8b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.221143] env[69475]: DEBUG nova.compute.provider_tree [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.401678] env[69475]: DEBUG oslo_vmware.api [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508027, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.334989} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.401917] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 788.402116] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 788.402293] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 788.402515] env[69475]: INFO nova.compute.manager [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 788.402698] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 788.402888] env[69475]: DEBUG nova.compute.manager [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 788.402985] env[69475]: DEBUG nova.network.neutron [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 788.549936] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52296e1b-8a5d-3df7-92fd-90eb0df2d999, 'name': SearchDatastore_Task, 'duration_secs': 0.014563} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.550703] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-347d1be2-9b76-4630-92ad-6ffc3e52d525 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.556809] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 788.556809] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298d81e-2602-b1af-6050-b210da3ca4c3" [ 788.556809] env[69475]: _type = "Task" [ 788.556809] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.568339] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298d81e-2602-b1af-6050-b210da3ca4c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.650702] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.724560] env[69475]: DEBUG nova.scheduler.client.report [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 788.936971] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Received event network-vif-plugged-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.937235] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Acquiring lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.937534] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.937921] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.938140] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] No waiting events found dispatching network-vif-plugged-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 788.938359] env[69475]: WARNING nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Received unexpected event network-vif-plugged-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c for instance with vm_state building and task_state spawning. 
[ 788.938640] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Received event network-changed-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.938856] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Refreshing instance network info cache due to event network-changed-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 788.939191] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Acquiring lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.961281] env[69475]: DEBUG nova.network.neutron [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Updating instance_info_cache with network_info: [{"id": "cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "address": "fa:16:3e:40:8b:50", "network": {"id": "30dd2cfd-4344-44cf-a445-8f85cec52b0d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1175066341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385b952569624908badf6708cae97a51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6e2aa6-9a", "ovs_interfaceid": "cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.061978] env[69475]: DEBUG nova.network.neutron [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, 
"physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.071346] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298d81e-2602-b1af-6050-b210da3ca4c3, 'name': SearchDatastore_Task, 'duration_secs': 0.012821} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.072115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.074019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 420ecc09-60c8-4a14-8504-d11d760ddbb4/420ecc09-60c8-4a14-8504-d11d760ddbb4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 789.074019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-78ee21bb-5466-4d80-a3d0-2d6de2071b25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.083590] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 789.083590] env[69475]: value = "task-3508028" [ 789.083590] env[69475]: _type = "Task" [ 789.083590] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.093510] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508028, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.146315] env[69475]: DEBUG nova.network.neutron [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.230896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.088s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 789.231529] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 789.234195] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.854s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 789.235667] env[69475]: INFO nova.compute.claims [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 789.465029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Releasing lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.465193] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Instance network_info: |[{"id": "cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "address": "fa:16:3e:40:8b:50", "network": {"id": "30dd2cfd-4344-44cf-a445-8f85cec52b0d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1175066341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385b952569624908badf6708cae97a51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6e2aa6-9a", "ovs_interfaceid": 
"cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 789.465444] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Acquired lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.465674] env[69475]: DEBUG nova.network.neutron [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Refreshing network info cache for port cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.467212] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:8b:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2bf99f85-3a5c-47c6-a603-e215be6ab0bd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.475429] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Creating folder: Project (385b952569624908badf6708cae97a51). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.479768] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-719212d7-cb5d-4507-8859-87e44dd82043 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.491649] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Created folder: Project (385b952569624908badf6708cae97a51) in parent group-v700823. [ 789.491891] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Creating folder: Instances. Parent ref: group-v700975. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.492209] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-340457f6-af0f-43be-94ae-e5faf72593e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.502422] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Created folder: Instances in parent group-v700975. 
[ 789.503088] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.503088] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.503243] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7045057-68af-4103-8010-b450ddc29b5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.523618] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.523618] env[69475]: value = "task-3508031" [ 789.523618] env[69475]: _type = "Task" [ 789.523618] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.536415] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508031, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.573444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.603079] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508028, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.649241] env[69475]: INFO nova.compute.manager [-] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Took 1.25 seconds to deallocate network for instance. [ 789.743348] env[69475]: DEBUG nova.compute.utils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 789.749032] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 789.749032] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 789.775838] env[69475]: DEBUG nova.network.neutron [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Updated VIF entry in instance network info cache for port cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 789.776696] env[69475]: DEBUG nova.network.neutron [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Updating instance_info_cache with network_info: [{"id": "cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "address": "fa:16:3e:40:8b:50", "network": {"id": "30dd2cfd-4344-44cf-a445-8f85cec52b0d", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1175066341-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "385b952569624908badf6708cae97a51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2bf99f85-3a5c-47c6-a603-e215be6ab0bd", "external-id": "nsx-vlan-transportzone-855", "segmentation_id": 855, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc6e2aa6-9a", "ovs_interfaceid": "cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.802126] env[69475]: DEBUG nova.policy [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d4323c195b24245a75109e165f900f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6dd9c026624896ae4de7fab35720d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 790.036386] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508031, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.093688] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709283} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.093959] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 420ecc09-60c8-4a14-8504-d11d760ddbb4/420ecc09-60c8-4a14-8504-d11d760ddbb4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 790.094232] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 790.094493] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bbc34a22-5bee-4751-94c1-d51e1a3b6bd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.100914] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 790.100914] env[69475]: value = "task-3508032" [ 790.100914] env[69475]: _type = "Task" [ 790.100914] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.110177] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508032, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.157491] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.247517] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Successfully created port: 6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.252395] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 790.282934] env[69475]: DEBUG oslo_concurrency.lockutils [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] Releasing lock "refresh_cache-41c23568-c8d7-4d6c-8cc4-a94c95b3223a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.282934] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Received event network-vif-deleted-5283f252-4c4d-4aaa-81d9-5fccc6edff8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.285368] env[69475]: INFO nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Neutron deleted interface 5283f252-4c4d-4aaa-81d9-5fccc6edff8d; detaching it from the instance and deleting it from the info cache [ 790.285368] env[69475]: DEBUG nova.network.neutron [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.538873] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508031, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.615519] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508032, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066122} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.615519] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 790.615519] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae25b9d-2700-49fc-a31a-4c1e533cfbdd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.640412] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 420ecc09-60c8-4a14-8504-d11d760ddbb4/420ecc09-60c8-4a14-8504-d11d760ddbb4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 790.643695] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a27cd730-8645-412b-aecc-7b8a98075176 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.665611] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 790.665611] env[69475]: value = "task-3508033" [ 790.665611] env[69475]: _type = "Task" [ 790.665611] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.681028] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508033, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.792912] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-adcf05af-4b49-4ee3-ae83-776fdf704469 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.804939] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d3c8f5a-96e9-491f-949b-3958a593bb7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.847162] env[69475]: DEBUG nova.compute.manager [req-7c800d70-da0e-41b6-bd55-016cc4c3c2f4 req-7aa5acfd-c30a-4ddf-92e7-e8a7320a71b4 service nova] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Detach interface failed, port_id=5283f252-4c4d-4aaa-81d9-5fccc6edff8d, reason: Instance a75d7a92-4ac7-4fa0-90f0-f0a0993e881e could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 790.882484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef057d59-8d16-4331-aab8-71f9e4be3753 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.890050] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf57a3f-3f24-4348-a5d6-25005da9baa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.920271] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b93f6e84-ed1a-45b6-9f39-ae66356369d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.927774] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e335f61-edc2-4ee8-840d-0dca4c9d849b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.943202] env[69475]: DEBUG nova.compute.provider_tree [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.039259] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508031, 'name': CreateVM_Task, 'duration_secs': 1.359474} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.039436] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 791.040253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.040416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.040961] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 791.041230] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f300bad2-b038-4d03-96c5-38d0c33f20f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.045940] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 791.045940] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ac6732-3874-52a6-1886-c1b535704b5c" [ 791.045940] env[69475]: _type = "Task" [ 791.045940] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.053680] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ac6732-3874-52a6-1886-c1b535704b5c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.096605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446a8ad4-29e5-48a4-b7b4-2b489cfd3076 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.114654] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 791.176102] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508033, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.262039] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 791.284369] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.284654] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 791.284814] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 791.284995] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 791.285166] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 
tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 791.285316] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 791.285521] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 791.285680] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 791.285869] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 791.286009] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 791.286190] env[69475]: DEBUG nova.virt.hardware [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 791.287070] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba821ffd-5661-47a7-8e8a-71bd500d676a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.295418] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898cbb3a-e9dc-45de-a3fd-62d5114dbbce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.465062] env[69475]: ERROR nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [req-23ded25c-bab2-4994-aa89-c7729aa6cf43] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-23ded25c-bab2-4994-aa89-c7729aa6cf43"}]} [ 791.480991] env[69475]: DEBUG nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 791.496719] env[69475]: DEBUG nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 791.496959] env[69475]: DEBUG nova.compute.provider_tree [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 791.508060] env[69475]: DEBUG nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 791.526498] env[69475]: DEBUG nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 791.557788] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ac6732-3874-52a6-1886-c1b535704b5c, 'name': SearchDatastore_Task, 'duration_secs': 0.030871} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.560413] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.560640] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 791.560878] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 791.561039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.561225] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 791.561701] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a9e3f3c-f9d1-4996-a73c-a3c0bb56e963 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.570049] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 791.570111] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 791.573028] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7d36d38-f881-4843-81fc-9b52af77c7f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.578313] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 791.578313] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b47da5-3a58-0004-4216-79abe5c66220" [ 791.578313] env[69475]: _type = "Task" [ 791.578313] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.586135] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b47da5-3a58-0004-4216-79abe5c66220, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.620666] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 791.621138] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef1cf1de-d051-42e6-a10b-4b88d683b6cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.631036] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 791.631036] env[69475]: value = "task-3508034" [ 791.631036] env[69475]: _type = "Task" [ 791.631036] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.641007] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.679577] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508033, 'name': ReconfigVM_Task, 'duration_secs': 0.728808} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.679786] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 420ecc09-60c8-4a14-8504-d11d760ddbb4/420ecc09-60c8-4a14-8504-d11d760ddbb4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 791.680537] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d782d767-eb37-4d2e-b47b-8543462e3e30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.687920] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 791.687920] env[69475]: value = "task-3508035" [ 791.687920] env[69475]: _type = "Task" [ 791.687920] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.696951] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508035, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.026176] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee86c209-1615-4de5-b558-530b72125b2b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.034130] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b92f1d6f-65f0-4b8e-a3f5-816fdc45f092 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.069889] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5fdaa6-fef3-4c89-96d6-317201591b82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.077264] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ef3183e-f0b5-4f4e-9854-179b6c2cdc0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.090969] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b47da5-3a58-0004-4216-79abe5c66220, 'name': SearchDatastore_Task, 'duration_secs': 0.011235} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.100008] env[69475]: DEBUG nova.compute.provider_tree [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.101404] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6bbb7811-612c-4881-8e60-35f0e5743d96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.106881] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 792.106881] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520794f9-69a3-4b06-953d-af94d7dea20f" [ 792.106881] env[69475]: _type = "Task" [ 792.106881] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.114371] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520794f9-69a3-4b06-953d-af94d7dea20f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.140962] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508034, 'name': PowerOffVM_Task, 'duration_secs': 0.263105} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.141241] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 792.141415] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 792.197806] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508035, 'name': Rename_Task, 'duration_secs': 0.175053} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.198410] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 792.198410] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f5f03b75-7fce-49f9-9457-6b2e806de4bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.204455] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 792.204455] env[69475]: value = "task-3508036" [ 792.204455] env[69475]: _type = "Task" [ 792.204455] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.212515] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508036, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.215865] env[69475]: DEBUG nova.compute.manager [req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Received event network-vif-plugged-6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 792.216093] env[69475]: DEBUG oslo_concurrency.lockutils [req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] Acquiring lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 792.216294] env[69475]: DEBUG oslo_concurrency.lockutils [req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.216465] env[69475]: DEBUG oslo_concurrency.lockutils [req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.216633] env[69475]: DEBUG nova.compute.manager [req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] No waiting events found dispatching network-vif-plugged-6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 792.216792] env[69475]: WARNING nova.compute.manager 
[req-ca3564b9-5745-4efe-afa8-f44801b73af8 req-cf318d4a-f523-4ea9-9405-3be22150b9d6 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Received unexpected event network-vif-plugged-6c87b79b-ed3d-448d-a02d-1004956a1d8d for instance with vm_state building and task_state spawning. [ 792.357853] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Successfully updated port: 6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 792.616953] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520794f9-69a3-4b06-953d-af94d7dea20f, 'name': SearchDatastore_Task, 'duration_secs': 0.024686} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 792.617230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.617484] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 41c23568-c8d7-4d6c-8cc4-a94c95b3223a/41c23568-c8d7-4d6c-8cc4-a94c95b3223a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 792.617736] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39db6985-ee3f-4373-98d8-c03764045d2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.625539] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 792.625539] env[69475]: value = "task-3508037" [ 792.625539] env[69475]: _type = "Task" [ 792.625539] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.633101] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508037, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.638966] env[69475]: DEBUG nova.scheduler.client.report [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 74 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 792.639353] env[69475]: DEBUG nova.compute.provider_tree [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 74 to 75 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 792.640038] env[69475]: DEBUG nova.compute.provider_tree [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 792.648867] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 792.649862] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 792.649862] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:366}} [ 792.649862] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 792.649862] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 792.649862] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 792.650115] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 792.650115] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 792.650358] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 792.650551] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 792.650757] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 792.655929] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4a45a697-3a27-4df9-bf33-1a2202afb4ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.672270] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 792.672270] env[69475]: value = "task-3508038" [ 792.672270] env[69475]: _type = "Task" [ 792.672270] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.680548] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.710541] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 792.711479] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a5c5b7-065e-4f76-a0ec-304ecd684e87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.720133] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.722251] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 792.722509] env[69475]: ERROR oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk due to incomplete transfer. [ 792.722826] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ed7d4aeb-5b24-4a83-95e5-588dbe2bd405 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.731026] env[69475]: DEBUG oslo_vmware.rw_handles [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5219a0e6-de3e-321d-ee40-999595476232/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 792.731026] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Uploaded image 091c65f1-c5d2-4582-a0e4-5024ab969207 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 792.732573] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 792.733156] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ce080aee-818d-4fda-9716-36f52d3ce194 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.738139] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 792.738139] env[69475]: value = "task-3508039" [ 792.738139] env[69475]: _type = "Task" [ 792.738139] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.745431] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508039, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.861824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.861975] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 792.862196] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.139863] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508037, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.145628] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.911s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 793.146204] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 793.148898] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.974s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 793.150492] env[69475]: INFO nova.compute.claims [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.185092] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508038, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.214559] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508036, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.248750] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508039, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.411544] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 793.621282] env[69475]: DEBUG nova.network.neutron [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Updating instance_info_cache with network_info: [{"id": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "address": "fa:16:3e:91:6a:5e", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87b79b-ed", "ovs_interfaceid": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.635875] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508037, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.722311} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.636844] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 41c23568-c8d7-4d6c-8cc4-a94c95b3223a/41c23568-c8d7-4d6c-8cc4-a94c95b3223a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.637087] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.637351] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9d9122f-6d43-4e90-924f-4c4f5c184338 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.643492] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 793.643492] env[69475]: value = "task-3508040" [ 793.643492] env[69475]: _type = "Task" [ 793.643492] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.652242] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.654575] env[69475]: DEBUG nova.compute.utils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.658774] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.658898] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.683307] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508038, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.697141] env[69475]: DEBUG nova.policy [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34ec94b3705a455a8bd13f54927167ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b14737c5edf94580b711ca21258a8811', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.715163] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508036, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.748537] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508039, 'name': Destroy_Task, 'duration_secs': 0.633871} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.748799] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Destroyed the VM [ 793.749132] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 793.749407] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4d6313a4-05f9-4bc7-80ff-d0b936a38a85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.757412] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 793.757412] env[69475]: value = "task-3508041" [ 793.757412] env[69475]: _type = "Task" [ 793.757412] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.765390] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508041, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.040705] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Successfully created port: eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.125213] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.125213] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Instance network_info: |[{"id": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "address": "fa:16:3e:91:6a:5e", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87b79b-ed", "ovs_interfaceid": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.125424] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:6a:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6c87b79b-ed3d-448d-a02d-1004956a1d8d', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.134618] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.134848] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.135094] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-925f8b34-bda6-419c-af90-0c79b798c127 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.160417] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 794.168646] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.116283} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.168847] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.168847] env[69475]: value = "task-3508042" [ 794.168847] env[69475]: _type = "Task" [ 794.168847] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.169727] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 794.170587] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9324b5-9522-422e-a044-2d1be1938cef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.201743] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] 41c23568-c8d7-4d6c-8cc4-a94c95b3223a/41c23568-c8d7-4d6c-8cc4-a94c95b3223a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 794.208603] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57d4b53f-2001-44ac-b4ef-19e8d2bbb861 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.223190] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508042, 'name': CreateVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.229423] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508038, 'name': ReconfigVM_Task, 'duration_secs': 1.315865} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.230070] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 794.235485] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 794.235485] env[69475]: value = "task-3508043" [ 794.235485] env[69475]: _type = "Task" [ 794.235485] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.244161] env[69475]: DEBUG oslo_vmware.api [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508036, 'name': PowerOnVM_Task, 'duration_secs': 1.563297} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.245211] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 794.245412] env[69475]: INFO nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Took 11.08 seconds to spawn the instance on the hypervisor. [ 794.245650] env[69475]: DEBUG nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 794.246444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743dcb0c-87e5-4167-a05e-994bfc1e5c7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.253409] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508043, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.270525] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508041, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.349021] env[69475]: DEBUG nova.compute.manager [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Received event network-changed-6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 794.349397] env[69475]: DEBUG nova.compute.manager [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Refreshing instance network info cache due to event network-changed-6c87b79b-ed3d-448d-a02d-1004956a1d8d. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 794.349540] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] Acquiring lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 794.349666] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] Acquired lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 794.349859] env[69475]: DEBUG nova.network.neutron [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Refreshing network info cache for port 6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 794.698644] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508042, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.731100] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa155869-2069-41e7-99ac-e0d36b502c7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.741170] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 794.741410] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 794.741565] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 794.741804] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 794.741952] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 794.742109] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 794.742313] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 794.742471] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} 
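The nova.virt.hardware entries above (and the "Got 1 possible topologies" / "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries that follow immediately below) show the driver enumerating candidate CPU topologies for the 1-vCPU m1.nano flavor under the default 65536 sockets/cores/threads maxima. The snippet here is a minimal, illustrative sketch of that enumeration, assuming a plain divisor search; it is not Nova's actual _get_possible_cpu_topologies implementation, and VirtCPUTopology below is a stand-in namedtuple rather than the real Nova object.

# Illustrative sketch only: enumerate (sockets, cores, threads) triples whose
# product equals the flavor's vCPU count, capped by per-dimension maxima.
# Mirrors the "Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible
# topologies" lines in the log; the real logic lives in nova/virt/hardware.py.
from collections import namedtuple

VirtCPUTopology = namedtuple("VirtCPUTopology", ["sockets", "cores", "threads"])

def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Return every topology whose sockets * cores * threads == vcpus."""
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                found.append(VirtCPUTopology(sockets, cores, threads))
    return found

# A 1-vCPU flavor such as m1.nano admits exactly one topology, matching the log:
print(possible_cpu_topologies(1))  # [VirtCPUTopology(sockets=1, cores=1, threads=1)]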
[ 794.742645] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 794.742828] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 794.743076] env[69475]: DEBUG nova.virt.hardware [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 794.748324] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfiguring VM instance instance-00000028 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 794.749203] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c94d8d04-1197-4932-b839-a950f212e85c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.774063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6adc7a10-4327-45fc-a818-9ae70b7d30fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.792824] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508043, 'name': ReconfigVM_Task, 'duration_secs': 0.27233} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.793320] env[69475]: INFO nova.compute.manager [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Took 38.64 seconds to build instance. [ 794.819953] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Reconfigured VM instance instance-00000032 to attach disk [datastore1] 41c23568-c8d7-4d6c-8cc4-a94c95b3223a/41c23568-c8d7-4d6c-8cc4-a94c95b3223a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.821765] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 794.821765] env[69475]: value = "task-3508044" [ 794.821765] env[69475]: _type = "Task" [ 794.821765] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.822340] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e1d6b6f0-cc89-4f95-a891-55551fa0f72a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.542s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 794.825568] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18f65067-7b76-41dd-8f0e-86445e8ae93e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.827452] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71be7d40-7b8a-48d9-9341-3a66ed7b3c24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.831489] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508041, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.844845] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefd2b21-c66c-4c56-80eb-eb83bd4f8188 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.849919] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 794.849919] env[69475]: value = "task-3508045" [ 794.849919] env[69475]: _type = "Task" [ 794.849919] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.850191] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508044, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.865118] env[69475]: DEBUG nova.compute.provider_tree [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.873192] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508045, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.106531] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.106824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.107077] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.107671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.107850] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.114571] env[69475]: INFO nova.compute.manager [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Terminating instance [ 795.175550] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 795.182070] env[69475]: DEBUG nova.network.neutron [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Updated VIF entry in instance network info cache for port 6c87b79b-ed3d-448d-a02d-1004956a1d8d. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 795.182420] env[69475]: DEBUG nova.network.neutron [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Updating instance_info_cache with network_info: [{"id": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "address": "fa:16:3e:91:6a:5e", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6c87b79b-ed", "ovs_interfaceid": "6c87b79b-ed3d-448d-a02d-1004956a1d8d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 795.198061] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508042, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.209633] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 795.209899] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 795.210074] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 795.210259] env[69475]: 
DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 795.210405] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 795.210548] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 795.210758] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 795.210917] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 795.211181] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 795.211416] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 795.211593] env[69475]: DEBUG nova.virt.hardware [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 795.212943] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711718aa-7058-49c8-9f65-2bc842d86ca5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.221104] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fb0ac5-b2ea-42e7-ada5-1dd6b9427c0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.287125] env[69475]: DEBUG oslo_vmware.api [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508041, 'name': RemoveSnapshot_Task, 
'duration_secs': 1.322009} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.287411] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 795.287659] env[69475]: INFO nova.compute.manager [None req-04cdcd3e-6fa9-4b4f-bdbe-67f9486b85a2 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Took 18.42 seconds to snapshot the instance on the hypervisor. [ 795.338318] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 795.347371] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508044, 'name': ReconfigVM_Task, 'duration_secs': 0.159942} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.347371] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfigured VM instance instance-00000028 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 795.347371] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99bb9fb1-d254-4c1a-8b40-48486a243849 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.377619] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 795.378265] env[69475]: DEBUG nova.scheduler.client.report [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.382223] env[69475]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-369098e8-0496-4236-9ed5-ed9433c3d2b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.402744] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.403068] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.403335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.403553] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.403765] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.405936] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508045, 'name': Rename_Task, 'duration_secs': 0.222248} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.407420] env[69475]: INFO nova.compute.manager [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Terminating instance [ 795.411431] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.412587] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-faf63483-119a-49ed-8657-6e8895b5d22f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.417622] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 795.417622] env[69475]: value = "task-3508046" [ 795.417622] env[69475]: _type = "Task" [ 795.417622] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.425648] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 795.425648] env[69475]: value = "task-3508047" [ 795.425648] env[69475]: _type = "Task" [ 795.425648] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.435866] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.442030] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508047, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.530463] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3fba85c9-7798-4a66-b335-21f80962e0bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.530732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.530941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.531138] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.531337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.533799] env[69475]: INFO nova.compute.manager [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Terminating instance [ 795.621725] env[69475]: DEBUG nova.compute.manager [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 795.621992] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.622893] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb8b437-3760-49c5-912f-08d4899ef146 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.630501] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.630741] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ac08e9d-9584-4ccb-931e-9c982ea44fc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.637311] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 795.637311] env[69475]: value = "task-3508048" [ 795.637311] env[69475]: _type = "Task" [ 795.637311] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.646147] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508048, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.683803] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508042, 'name': CreateVM_Task, 'duration_secs': 1.165076} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.684020] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.685092] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ea7dcd2-bac0-4fc7-a31a-6aa4a6828e36 req-13230c1c-5ba8-43d6-846a-3a2853505e40 service nova] Releasing lock "refresh_cache-e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.685909] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.686096] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.686436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.686718] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d26d5a42-8f70-4a0a-8db4-073f19ed4a47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.691442] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 795.691442] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5222c38b-17ee-f97f-7c5f-ac5c9fa6b1be" [ 795.691442] env[69475]: _type = "Task" [ 795.691442] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.702449] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5222c38b-17ee-f97f-7c5f-ac5c9fa6b1be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.859254] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.884186] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.884499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.899922] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.751s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.901436] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.905663] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.272s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.907021] env[69475]: DEBUG nova.objects.instance [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lazy-loading 'resources' on Instance uuid 00ba5cd8-3516-4059-bcda-c2d01e165e07 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 795.917965] env[69475]: DEBUG nova.compute.manager [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 795.918361] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 795.921019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-461d2dcb-cb7b-405c-a516-144a495dcf09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.936074] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 795.948696] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c8c1e44-3065-4e56-a3fe-d3367e05a1c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.949426] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508046, 'name': ReconfigVM_Task, 'duration_secs': 0.464987} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.950485] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Successfully updated port: eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 795.952922] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508047, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.953127] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a/4b3b53d1-82bf-40e7-9988-af7b51e9883a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 795.953402] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 795.962885] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 795.962885] env[69475]: value = "task-3508049" [ 795.962885] env[69475]: _type = "Task" [ 795.962885] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.972765] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508049, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.040080] env[69475]: DEBUG nova.compute.manager [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 796.040080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 796.044241] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc5fb47-5bd5-4937-b231-3a1eb42f1088 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.055665] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 796.056264] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a936cb50-49af-4caa-8bb6-dd49dc1967dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.064152] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 796.064152] env[69475]: value = "task-3508050" [ 796.064152] env[69475]: _type = "Task" [ 796.064152] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.075502] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508050, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.149342] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508048, 'name': PowerOffVM_Task, 'duration_secs': 0.178548} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.149637] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.149807] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.150097] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ada9ee2-b04b-4393-a207-ee7610ae838d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.205588] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5222c38b-17ee-f97f-7c5f-ac5c9fa6b1be, 'name': SearchDatastore_Task, 'duration_secs': 0.010194} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.205952] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.206243] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 796.206510] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.206686] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.206912] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 796.207238] 
env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f06863db-233d-4699-bcaf-1b14ecdeb1d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.219221] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.219475] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.219716] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleting the datastore file [datastore2] 25c44ae0-4193-4833-85ec-ebc0ef3cf593 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.221718] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dedd812-117f-4012-9e31-cc31395d3efb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.223070] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 796.223235] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 796.223980] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e59a497-1a36-40b8-a1de-933d1bbb37ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.229738] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 796.229738] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52482601-e038-3d57-c2f8-d60e9acc57b3" [ 796.229738] env[69475]: _type = "Task" [ 796.229738] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.231059] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 796.231059] env[69475]: value = "task-3508052" [ 796.231059] env[69475]: _type = "Task" [ 796.231059] env[69475]: } to complete. 
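The Acquiring/Acquired/Releasing lock lines around the cached image VMDK above serialize concurrent builds that want the same image-cache entry. A simplified, process-local stand-in for that named-lock pattern (Nova itself uses oslo_concurrency.lockutils, which can also take external file locks):

    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one lock per resource name
    _registry_lock = threading.Lock()

    @contextmanager
    def named_lock(name):
        """Acquire the lock registered under `name`, creating it on first use."""
        with _registry_lock:
            lock = _locks[name]
        lock.acquire()        # "Acquiring lock ..." / "Acquired lock ..."
        try:
            yield
        finally:
            lock.release()    # "Releasing lock ..."

    # Usage: only one worker at a time touches a given image-cache path.
    with named_lock("[datastore1] devstack-image-cache_base/<image-id>.vmdk"):
        pass  # fetch the image if missing, or copy the cached VMDK
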
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.242293] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52482601-e038-3d57-c2f8-d60e9acc57b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009195} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.246433] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508052, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.246690] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2334852-35ba-4ddc-8967-4905436d6ebb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.251777] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 796.251777] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c3d49d-508a-0c6b-c174-e6ca23b2c905" [ 796.251777] env[69475]: _type = "Task" [ 796.251777] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.259998] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c3d49d-508a-0c6b-c174-e6ca23b2c905, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.410791] env[69475]: DEBUG nova.compute.utils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 796.412410] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 796.412540] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.443357] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508047, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.457012] env[69475]: DEBUG nova.compute.manager [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Received event network-vif-plugged-eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.457314] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Acquiring lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 796.457933] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 796.457933] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 796.458238] env[69475]: DEBUG nova.compute.manager [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] No waiting events found dispatching network-vif-plugged-eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 796.458396] env[69475]: WARNING nova.compute.manager [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Received unexpected event network-vif-plugged-eeaf90cd-47e0-4b13-b5a2-efabd98551b7 for instance with vm_state building and task_state spawning. [ 796.458449] env[69475]: DEBUG nova.compute.manager [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Received event network-changed-eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 796.458587] env[69475]: DEBUG nova.compute.manager [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Refreshing instance network info cache due to event network-changed-eeaf90cd-47e0-4b13-b5a2-efabd98551b7. 
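The network-vif-plugged / network-changed handling above follows Nova's external-event pattern: Neutron reports an event, the compute manager looks for a waiter registered for that (instance, event) pair, and either wakes it or, as in the WARNING above, notes that nothing was waiting. A toy version of that dispatch using one threading.Event per expected event (the registry layout is illustrative, not Nova's actual InstanceEvents structure):

    import threading
    from collections import defaultdict

    _waiters = defaultdict(dict)      # instance_uuid -> {event_key: threading.Event}
    _waiters_lock = threading.Lock()

    def expect_event(instance_uuid, event_key):
        """Register interest in e.g. 'network-vif-plugged-<port-id>'; return the Event."""
        ev = threading.Event()
        with _waiters_lock:
            _waiters[instance_uuid][event_key] = ev
        return ev

    def pop_instance_event(instance_uuid, event_key):
        """Deliver an external event; wake the waiter if one exists."""
        with _waiters_lock:
            ev = _waiters[instance_uuid].pop(event_key, None)
        if ev is None:
            # Matches "No waiting events found dispatching ..." and the
            # "Received unexpected event ..." WARNING above.
            print(f"unexpected event {event_key} for {instance_uuid}")
            return False
        ev.set()
        return True
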
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 796.458816] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Acquiring lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.458958] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Acquired lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.459068] env[69475]: DEBUG nova.network.neutron [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Refreshing network info cache for port eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 796.460604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.463734] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdbd743-7b93-47e5-bcef-20d0285caa94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.476545] env[69475]: DEBUG nova.policy [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82f6c3724a2b4430b8df87655ff91c63', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1073981d0d7740e78805798e02ff9d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.498926] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3c771f-d43a-4d54-910a-36d41f1d6015 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.501832] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508049, 'name': PowerOffVM_Task, 'duration_secs': 0.165646} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.502527] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.502714] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.503344] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8a78d85-c676-44c1-9d99-f1980155324a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.526375] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 796.577952] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508050, 'name': PowerOffVM_Task, 'duration_secs': 0.179012} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.578264] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 796.578434] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 796.578720] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e913127-558c-4010-aaa8-6e373465c14d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.589749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.589970] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.590374] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleting the datastore file [datastore1] 420ecc09-60c8-4a14-8504-d11d760ddbb4 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.590646] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02bf3c31-1f3f-42c5-b88c-484ef4fa5b59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.599613] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 796.599613] env[69475]: value = "task-3508055" [ 796.599613] env[69475]: _type = "Task" [ 796.599613] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.608179] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508055, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.644332] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 796.644607] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 796.644839] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleting the datastore file [datastore2] 3fba85c9-7798-4a66-b335-21f80962e0bd {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 796.645160] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-017678d0-a0e7-426a-acdc-2abecef68716 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.651888] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for the task: (returnval){ [ 796.651888] env[69475]: value = "task-3508056" [ 796.651888] env[69475]: _type = "Task" [ 796.651888] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.663070] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508056, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.745316] env[69475]: DEBUG oslo_vmware.api [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508052, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147161} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.745645] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 796.745786] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 796.745956] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 796.746162] env[69475]: INFO nova.compute.manager [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Took 1.12 seconds to destroy the instance on the hypervisor. [ 796.746418] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 796.746623] env[69475]: DEBUG nova.compute.manager [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 796.746731] env[69475]: DEBUG nova.network.neutron [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 796.769419] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c3d49d-508a-0c6b-c174-e6ca23b2c905, 'name': SearchDatastore_Task, 'duration_secs': 0.008039} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.770645] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.770645] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d/e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.770645] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d63efab-dbbc-4c2e-b92b-9079273e5e19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.777394] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 796.777394] env[69475]: value = "task-3508057" [ 796.777394] env[69475]: _type = "Task" [ 796.777394] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.787647] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508057, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.923039] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 796.946341] env[69475]: DEBUG oslo_vmware.api [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508047, 'name': PowerOnVM_Task, 'duration_secs': 1.060201} completed successfully. 
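The SearchDatastore/CopyVirtualDisk sequence above is the image-cache fast path: the base image already sits in devstack-image-cache_base, so the driver copies the cached VMDK into the new instance's directory instead of fetching it from Glance again (the root disk is extended to the flavor size afterwards). A rough outline of that decision, with every helper a hypothetical stub so the sketch runs on its own:

    def provision_root_disk(image_id, instance_uuid,
                            cache_dir="devstack-image-cache_base"):
        """Copy-from-cache if the base VMDK exists, otherwise fetch it first."""
        cached = f"[datastore1] {cache_dir}/{image_id}/{image_id}.vmdk"
        target = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
        if not datastore_file_exists(cached):       # SearchDatastore_Task
            fetch_image_from_glance(image_id, cached)
        copy_virtual_disk(cached, target)           # CopyVirtualDisk_Task
        extend_virtual_disk(target)                 # ExtendVirtualDisk_Task
        return target

    # Hypothetical stand-ins:
    def datastore_file_exists(path):           return True
    def fetch_image_from_glance(image_id, to): print(f"fetch {image_id} -> {to}")
    def copy_virtual_disk(src, dst):           print(f"copy {src} -> {dst}")
    def extend_virtual_disk(path):             print(f"extend {path}")

    provision_root_disk("afa9d32c-9f39-44fb-bf3b-50d35842a59f",
                        "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d")
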
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.950426] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.950552] env[69475]: INFO nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Took 10.44 seconds to spawn the instance on the hypervisor. [ 796.950737] env[69475]: DEBUG nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.951983] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfaaaf1-905c-409b-a5a4-50c64ebea378 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.014146] env[69475]: DEBUG nova.network.neutron [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.112849] env[69475]: DEBUG oslo_vmware.api [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508055, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172313} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.112849] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.112849] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.113538] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.113538] env[69475]: INFO nova.compute.manager [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Took 1.19 seconds to destroy the instance on the hypervisor. 
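Taken together, the destroy paths in these entries follow a fixed order: power the VM off, unregister it from vCenter, delete its datastore directory, then deallocate its Neutron ports; the "Took N seconds to destroy the instance on the hypervisor" line covers the first three steps. An outline with hypothetical stubs, only to make the ordering explicit (these are not Nova's real driver entry points):

    def destroy_instance(instance_uuid, datastore="datastore1"):
        power_off_vm(instance_uuid)                              # PowerOffVM_Task
        unregister_vm(instance_uuid)                             # VirtualMachine.UnregisterVM
        delete_datastore_dir(f"[{datastore}] {instance_uuid}")   # DeleteDatastoreFile_Task
        deallocate_network(instance_uuid)                        # neutron deallocate_for_instance()

    # Stand-ins so the outline is runnable:
    def power_off_vm(uuid):          print(f"power off {uuid}")
    def unregister_vm(uuid):         print(f"unregister {uuid}")
    def delete_datastore_dir(path):  print(f"delete {path}")
    def deallocate_network(uuid):    print(f"deallocate ports for {uuid}")

    destroy_instance("420ecc09-60c8-4a14-8504-d11d760ddbb4")
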
[ 797.113538] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.116393] env[69475]: DEBUG nova.compute.manager [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.116526] env[69475]: DEBUG nova.network.neutron [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.119475] env[69475]: DEBUG nova.network.neutron [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Port fd636137-6583-4c7a-937a-701561e4141a binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 797.165587] env[69475]: DEBUG oslo_vmware.api [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Task: {'id': task-3508056, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181112} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.165862] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 797.166093] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 797.166281] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 797.166452] env[69475]: INFO nova.compute.manager [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Took 1.13 seconds to destroy the instance on the hypervisor. [ 797.166695] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 797.167353] env[69475]: DEBUG nova.compute.manager [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 797.167353] env[69475]: DEBUG nova.network.neutron [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 797.187545] env[69475]: DEBUG nova.network.neutron [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.200497] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4556038a-5c1b-42eb-bd6c-7113626bd1fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.210980] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f1e090-5590-4b5a-83ac-462809a00e89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.253235] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93c955b-c84f-47c9-85b8-6414f0472cc0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.264766] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0520fc47-d99d-42f1-9871-b226e8a2d019 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.284373] env[69475]: DEBUG nova.compute.provider_tree [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.297291] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508057, 'name': CopyVirtualDisk_Task} progress is 77%. 
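The inventory payload in the ProviderTree update above carries total, reserved and allocation_ratio per resource class; placement derives the schedulable capacity as (total - reserved) * allocation_ratio. Checking the logged numbers:

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
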
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.311275] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Successfully created port: 858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 797.482322] env[69475]: INFO nova.compute.manager [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Took 38.47 seconds to build instance. [ 797.510080] env[69475]: DEBUG nova.compute.manager [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 797.511918] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-309d4b28-cb83-4db9-b3f2-18c9b78d2021 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.690369] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f1c180-f05b-4df5-82b3-93fb1b390faa req-84df6833-dfbd-4d4e-add1-cea1061033ba service nova] Releasing lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.690734] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.690903] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.795715] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508057, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558344} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.795965] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d/e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.796197] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.796437] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-764e4cc4-d3f4-4780-9683-0a657fa4f157 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.803795] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 797.803795] env[69475]: value = "task-3508058" [ 797.803795] env[69475]: _type = "Task" [ 797.803795] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.812151] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.813079] env[69475]: ERROR nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] [req-f925ee1d-392b-4316-b63f-b8a1d405c9b4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f925ee1d-392b-4316-b63f-b8a1d405c9b4"}]} [ 797.833212] env[69475]: DEBUG nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 797.848822] env[69475]: DEBUG nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 797.849977] env[69475]: DEBUG nova.compute.provider_tree [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 797.862698] env[69475]: DEBUG nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 797.882302] env[69475]: DEBUG nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 797.929779] env[69475]: DEBUG nova.compute.manager [req-2d30f36a-dac2-4416-94b5-664eb4aaf045 req-092d93a2-64fe-4077-bd94-ae515e9f8f07 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Received event network-vif-deleted-c3c6dc43-00e2-4fba-acf9-0f100d3cf239 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.929970] 
env[69475]: INFO nova.compute.manager [req-2d30f36a-dac2-4416-94b5-664eb4aaf045 req-092d93a2-64fe-4077-bd94-ae515e9f8f07 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Neutron deleted interface c3c6dc43-00e2-4fba-acf9-0f100d3cf239; detaching it from the instance and deleting it from the info cache [ 797.930152] env[69475]: DEBUG nova.network.neutron [req-2d30f36a-dac2-4416-94b5-664eb4aaf045 req-092d93a2-64fe-4077-bd94-ae515e9f8f07 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.935026] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.960851] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 797.962019] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 797.962019] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 797.962019] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 797.962019] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 797.962495] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 797.962843] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 797.966017] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 797.966017] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 797.966017] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 797.966017] env[69475]: DEBUG nova.virt.hardware [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 797.966017] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee64c361-647e-478a-898c-11aa50b7edb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.977673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-715b00e4-3a73-4525-8d35-b626769c2122 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.983423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5645ec1b-a6a6-490f-9abe-c0aa5a337e2c tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.808s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.022620] env[69475]: DEBUG nova.network.neutron [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.029056] env[69475]: INFO nova.compute.manager [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] instance snapshotting [ 798.031434] env[69475]: DEBUG 
nova.network.neutron [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.035141] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5208345-937e-4cb4-a066-9a43e356519e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.059280] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd71834-42ef-40ce-b6fa-663290aa4ee7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.144993] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.145666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.145666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.243105] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 798.317817] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064377} completed successfully. 
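A few entries back, nova.virt.hardware negotiated a CPU topology for the m1.nano flavor: neither flavor nor image expressed a preference (0:0:0), the maxima defaulted to 65536, and for a single vCPU the only possible topology is 1 socket x 1 core x 1 thread. The core of that search is enumerating factorizations of the vCPU count within the maxima; a simplified version with small illustrative maxima (real Nova also orders candidates by preference):

    from itertools import product

    def possible_topologies(vcpus, max_sockets=8, max_cores=8, max_threads=2):
        """Return (sockets, cores, threads) triples whose product equals vcpus."""
        return [(s, c, t)
                for s, c, t in product(range(1, max_sockets + 1),
                                       range(1, max_cores + 1),
                                       range(1, max_threads + 1))
                if s * c * t == vcpus]

    print(possible_topologies(1))   # [(1, 1, 1)], matching the logged result
    print(possible_topologies(4))   # several candidates, e.g. (1, 4, 1), (2, 2, 1), (4, 1, 1)
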
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.318080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.319148] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382107dd-3e12-457b-a80c-200c748b4a96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.344728] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d/e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.349880] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f26e4303-6c37-4dda-9ca7-fd67c2798b65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.372235] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 798.372235] env[69475]: value = "task-3508059" [ 798.372235] env[69475]: _type = "Task" [ 798.372235] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.382629] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508059, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.437950] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-16afb114-d995-4e12-85a3-419e8f9618bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.448365] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f3d679e-a42f-4cde-93c3-3aacb3bb27a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.489562] env[69475]: DEBUG nova.compute.manager [req-2d30f36a-dac2-4416-94b5-664eb4aaf045 req-092d93a2-64fe-4077-bd94-ae515e9f8f07 service nova] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Detach interface failed, port_id=c3c6dc43-00e2-4fba-acf9-0f100d3cf239, reason: Instance 3fba85c9-7798-4a66-b335-21f80962e0bd could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 798.490797] env[69475]: DEBUG nova.network.neutron [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Updating instance_info_cache with network_info: [{"id": "eeaf90cd-47e0-4b13-b5a2-efabd98551b7", "address": "fa:16:3e:de:c7:2d", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeaf90cd-47", "ovs_interfaceid": "eeaf90cd-47e0-4b13-b5a2-efabd98551b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.499019] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 798.532078] env[69475]: INFO nova.compute.manager [-] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Took 1.36 seconds to deallocate network for instance. [ 798.538314] env[69475]: INFO nova.compute.manager [-] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Took 1.42 seconds to deallocate network for instance. 
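[editor's note] The ExtendVirtualDisk_Task / ReconfigVM_Task entries above follow the same invoke-and-poll shape throughout this log: a vCenter task is submitted, then polled until it reports success ("Waiting for the task ... progress is N% ... completed successfully"). The following is an illustrative, self-contained sketch of that polling pattern only; it is not Nova's or oslo.vmware's actual code, and the fake task object exists solely so the snippet runs on its own.

    # Illustrative only: poll-until-done, mirroring the _poll_task DEBUG lines.
    # oslo.vmware's real wait_for_task() does this against a vCenter TaskInfo;
    # here poll_task is a stand-in callable returning (state, progress).
    import time

    def wait_for_task(poll_task, interval=0.5):
        """Poll a task callable until it reports success, or raise on error."""
        while True:
            state, progress = poll_task()
            if state == 'success':
                return
            if state == 'error':
                raise RuntimeError('task failed')
            print(f'progress is {progress}%')   # analogous to the progress lines above
            time.sleep(interval)

    # Fake task that finishes on the third poll (assumption for demonstration).
    _polls = iter([('running', 0), ('running', 94), ('success', 100)])
    wait_for_task(lambda: next(_polls), interval=0.01)
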
[ 798.542018] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df0dcea0-1c24-4181-94b2-9c14b44bd12c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.544253] env[69475]: DEBUG nova.network.neutron [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 798.553029] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1be24e-48d2-4be4-b4f1-93b9e12bc43d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.589319] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 798.590150] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-24d53584-432d-458f-a857-ce097579069c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.592794] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee74a9e7-0b02-45d0-822a-f7325f2346a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.604037] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91957b86-2b87-43d9-a9be-82d2ccdc375a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.606502] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 798.606502] env[69475]: value = "task-3508060" [ 798.606502] env[69475]: _type = "Task" [ 798.606502] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.621541] env[69475]: DEBUG nova.compute.provider_tree [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 798.625292] env[69475]: DEBUG nova.compute.manager [req-3febe989-d48f-475c-89a8-fd45984499eb req-41a72888-2570-4648-a762-866a91d584b4 service nova] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Received event network-vif-deleted-73a9904f-d8b5-4a55-8338-3f26cce4f9f7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 798.629028] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508060, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.795916] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.796239] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.796405] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.796590] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.796782] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.798869] env[69475]: INFO nova.compute.manager [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Terminating instance [ 798.882668] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508059, 'name': ReconfigVM_Task, 'duration_secs': 0.280487} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.882988] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Reconfigured VM instance instance-00000033 to attach disk [datastore1] e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d/e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.883708] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d069703-1eca-4b0d-80f0-c31ca04bc8fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.889921] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 798.889921] env[69475]: value = "task-3508061" [ 798.889921] env[69475]: _type = "Task" [ 798.889921] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.897930] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508061, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.994911] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "refresh_cache-2e7066ca-162e-4465-a9c1-5422510e4c0f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.995428] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance network_info: |[{"id": "eeaf90cd-47e0-4b13-b5a2-efabd98551b7", "address": "fa:16:3e:de:c7:2d", "network": {"id": "7baa3036-0da2-4997-9256-7e09e8122eec", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1554281652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b14737c5edf94580b711ca21258a8811", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeeaf90cd-47", "ovs_interfaceid": "eeaf90cd-47e0-4b13-b5a2-efabd98551b7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 798.996050] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:c7:2d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eeaf90cd-47e0-4b13-b5a2-efabd98551b7', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 799.005035] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 799.007752] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 799.007952] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa92aa38-adf3-4f1d-b308-5f0d9f929d75 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.029404] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 799.029404] env[69475]: value = "task-3508062" [ 799.029404] env[69475]: _type = "Task" [ 799.029404] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.037100] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508062, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.038167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.043152] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.050684] env[69475]: INFO nova.compute.manager [-] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Took 2.30 seconds to deallocate network for instance. [ 799.051482] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.117608] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508060, 'name': CreateSnapshot_Task, 'duration_secs': 0.491996} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.117881] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 799.118704] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ac2002-eeaf-4636-976c-649c8daa0c2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.162096] env[69475]: DEBUG nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 76 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 799.162426] env[69475]: DEBUG nova.compute.provider_tree [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 76 to 77 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 799.162658] env[69475]: DEBUG nova.compute.provider_tree [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 799.242366] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.242619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.242907] env[69475]: DEBUG nova.network.neutron [None 
req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.303076] env[69475]: DEBUG nova.compute.manager [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 799.303401] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 799.304857] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77b73b1-0a78-41ed-af2d-b855dbd1465b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.312809] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 799.313075] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-089d7cfa-cc23-498f-8525-db36ce8539a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.319696] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 799.319696] env[69475]: value = "task-3508063" [ 799.319696] env[69475]: _type = "Task" [ 799.319696] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.328534] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508063, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.401097] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508061, 'name': Rename_Task, 'duration_secs': 0.13552} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.401362] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.401603] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9fec464-a9f7-4b38-8e5c-ed8dc506c303 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.408863] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 799.408863] env[69475]: value = "task-3508064" [ 799.408863] env[69475]: _type = "Task" [ 799.408863] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.417644] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508064, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.540063] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508062, 'name': CreateVM_Task, 'duration_secs': 0.351831} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.540063] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 799.540433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.540614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.540976] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 799.541242] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c1bf5eb9-55f7-47ce-b76a-9e0e5ad1f2ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.546628] env[69475]: DEBUG oslo_vmware.api [None 
req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 799.546628] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fcd7c0-651f-900f-3361-aa271df7d334" [ 799.546628] env[69475]: _type = "Task" [ 799.546628] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.554332] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fcd7c0-651f-900f-3361-aa271df7d334, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.557568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 799.637772] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 799.639013] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-de3f68cd-a80f-42e9-84c4-866733873ca9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.643276] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Successfully updated port: 858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 799.648101] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 799.648101] env[69475]: value = "task-3508065" [ 799.648101] env[69475]: _type = "Task" [ 799.648101] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.658871] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508065, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.671605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.764s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 799.672241] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.704s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 799.674193] env[69475]: INFO nova.compute.claims [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.696018] env[69475]: INFO nova.scheduler.client.report [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Deleted allocations for instance 00ba5cd8-3516-4059-bcda-c2d01e165e07 [ 799.830965] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508063, 'name': PowerOffVM_Task, 'duration_secs': 0.208252} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.831332] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 799.831468] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 799.831781] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1af3497c-58ad-4ff6-9522-15d69838b9d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.901637] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 799.901936] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 799.902142] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Deleting the datastore file [datastore1] 41c23568-c8d7-4d6c-8cc4-a94c95b3223a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 799.902424] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b27a2f42-45c2-46ab-a8a1-c223f76c8a52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.909669] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for the task: (returnval){ [ 799.909669] env[69475]: value = "task-3508067" [ 799.909669] env[69475]: _type = "Task" [ 799.909669] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.922662] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508064, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.925636] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508067, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.955927] env[69475]: DEBUG nova.compute.manager [req-21de4094-d394-4b71-8a3c-2ad255037d1c req-abb62400-e34f-4b1a-9a9a-53335fc6d5e2 service nova] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Received event network-vif-deleted-1e66a927-4da7-44a7-8abe-812876507f48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.034972] env[69475]: DEBUG nova.network.neutron [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.058415] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fcd7c0-651f-900f-3361-aa271df7d334, 'name': SearchDatastore_Task, 'duration_secs': 0.00812} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.058732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.059351] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 800.059616] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.059810] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.059993] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 800.060526] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e736ed42-2861-4d3e-baf8-b2a4405e1f4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.068972] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 800.069178] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 800.069908] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17c9fd92-3599-46ac-bf58-e44a6d81bb2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.076825] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 800.076825] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521dd9e8-ec09-0c8b-785b-83f11c216077" [ 800.076825] env[69475]: _type = "Task" [ 800.076825] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.084739] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521dd9e8-ec09-0c8b-785b-83f11c216077, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.145968] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.146486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.146873] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 800.158884] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508065, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.204627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eab7ab0a-967d-4cc9-aa9b-3689cbc47a3d tempest-AttachInterfacesUnderV243Test-1802281692 tempest-AttachInterfacesUnderV243Test-1802281692-project-member] Lock "00ba5cd8-3516-4059-bcda-c2d01e165e07" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.980s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.423332] env[69475]: DEBUG oslo_vmware.api [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508064, 'name': PowerOnVM_Task, 'duration_secs': 0.647645} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.425043] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 800.425177] env[69475]: INFO nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Took 9.16 seconds to spawn the instance on the hypervisor. [ 800.425329] env[69475]: DEBUG nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 800.425606] env[69475]: DEBUG oslo_vmware.api [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Task: {'id': task-3508067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425881} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.426286] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c34597cb-9b76-453b-97e1-dea1915eee3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.428603] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 800.428794] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 800.428962] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 800.429139] env[69475]: INFO nova.compute.manager [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Took 1.13 seconds to destroy the instance on the hypervisor. [ 800.429369] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 800.429604] env[69475]: DEBUG nova.compute.manager [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 800.429709] env[69475]: DEBUG nova.network.neutron [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 800.538310] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.586867] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521dd9e8-ec09-0c8b-785b-83f11c216077, 'name': SearchDatastore_Task, 'duration_secs': 0.009329} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.587662] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81be04b4-9da0-474d-b2d7-3d310e806b7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.600981] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 800.600981] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b6fbd-c1c6-ec64-73db-ea17acbcbf6f" [ 800.600981] env[69475]: _type = "Task" [ 800.600981] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.610892] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b6fbd-c1c6-ec64-73db-ea17acbcbf6f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.656099] env[69475]: DEBUG nova.compute.manager [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Received event network-vif-plugged-858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.656099] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.656305] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.656505] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.656636] env[69475]: DEBUG nova.compute.manager [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] No waiting events found dispatching network-vif-plugged-858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 800.656810] env[69475]: WARNING nova.compute.manager [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Received unexpected event 
network-vif-plugged-858c37b6-4824-46d3-9dff-c0e0d91c47b5 for instance with vm_state building and task_state spawning. [ 800.656960] env[69475]: DEBUG nova.compute.manager [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Received event network-changed-858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.657133] env[69475]: DEBUG nova.compute.manager [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Refreshing instance network info cache due to event network-changed-858c37b6-4824-46d3-9dff-c0e0d91c47b5. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 800.657298] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Acquiring lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.666621] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508065, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.677962] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.678236] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 800.727742] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.950384] env[69475]: INFO nova.compute.manager [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Took 38.48 seconds to build instance. 
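[editor's note] The recurring "Acquiring lock ... acquired :: waited N s ... released :: held N s" triplets above (for example around the per-instance "-events" lock in pop_instance_event.._pop_event) come from oslo.concurrency's lock wrapper. A minimal sketch of that usage pattern, assuming only the public oslo.concurrency API, is shown below; the function name and lock name are copied from the log for illustration and the body is a placeholder, not Nova's implementation.

    # Minimal sketch of the synchronized-section pattern behind the lock log lines.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('86464a01-e034-43b6-a6d5-45f9e3b6715b-events')
    def _pop_event():
        # Runs with the per-instance "-events" lock held; the wrapper logs how
        # long the caller waited for the lock and how long it was held.
        return None

    _pop_event()
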
[ 801.014543] env[69475]: DEBUG nova.network.neutron [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.061383] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01703020-cc41-4e7d-b2ee-8f97e91b3dbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.090192] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62039ee1-beae-4d98-b367-fed47589be16 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.097790] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 801.116453] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b6fbd-c1c6-ec64-73db-ea17acbcbf6f, 'name': SearchDatastore_Task, 'duration_secs': 0.019127} completed successfully. 
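The instance_info_cache entry just logged is a list of VIF dictionaries. A self-contained example of pulling out the fields most often needed when reading these entries (port id, MAC, device name, fixed IPs); the literal below is an abbreviated copy of the logged data and is illustrative only:

```python
network_info = [{
    "id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5",
    "address": "fa:16:3e:8e:8b:e4",
    "network": {
        "id": "24115ce5-3e3f-4419-9144-03ad84d7f63b",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4}],
        }],
    },
    "devname": "tap858c37b6-48",
    "active": True,
}]

for vif in network_info:
    # Collect every fixed IP across all subnets attached to this VIF.
    ips = [ip["address"]
           for subnet in vif["network"]["subnets"]
           for ip in subnet["ips"]]
    print(vif["id"], vif["address"], vif["devname"], ips)
```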
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.116894] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.120718] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2e7066ca-162e-4465-a9c1-5422510e4c0f/2e7066ca-162e-4465-a9c1-5422510e4c0f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 801.124967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d4174143-581b-4441-8cbe-5e7d753f737b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.129375] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 801.129375] env[69475]: value = "task-3508068" [ 801.129375] env[69475]: _type = "Task" [ 801.129375] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.137986] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508068, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.162085] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508065, 'name': CloneVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.188709] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.188948] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.189183] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.189387] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.189682] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.189756] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.189936] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... 
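The last entry above ends with "CONF.reclaim_instance_interval <= 0, skipping...": the periodic task still runs on schedule, but returns immediately because the interval is left at its disabled value. A small sketch of that guard, using a stand-in config object rather than nova's CONF:

```python
class FakeConf:
    # Stand-in for nova's CONF; reclaim is disabled when the interval is <= 0.
    reclaim_instance_interval = 0


CONF = FakeConf()


def reclaim_queued_deletes():
    """Simplified shape of the guard behind the 'skipping...' log line."""
    interval = CONF.reclaim_instance_interval
    if interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # A real implementation would look up soft-deleted instances older than
    # `interval` seconds here and reclaim them.


reclaim_queued_deletes()
```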
{{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 801.190480] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.246268] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d88c621-b1c0-405b-a201-58b97a9d3283 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.255375] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c10fc08d-d1e9-4177-8a34-49a4fda84967 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.289394] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f725b3a9-6ed0-4436-9b9a-e157b8738d10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.297033] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e67316-1846-4372-9034-8fa736ae66c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.312732] env[69475]: DEBUG nova.compute.provider_tree [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.423563] env[69475]: DEBUG nova.network.neutron [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.454941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21a49838-6af2-4a81-a943-6c80e4ba9cc1 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 89.717s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.517954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 801.518124] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Instance network_info: |[{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 801.518347] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Acquired lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.518543] env[69475]: DEBUG nova.network.neutron [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Refreshing network info cache for port 858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 801.519882] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:8b:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '858c37b6-4824-46d3-9dff-c0e0d91c47b5', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 801.527753] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 801.531199] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 801.532248] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8b4f773-6bc5-4ecc-aa36-df77a859e079 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.553984] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 801.553984] env[69475]: value = "task-3508069" [ 801.553984] env[69475]: _type = "Task" [ 801.553984] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.562545] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508069, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.610486] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 801.610817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd5c42f3-075d-4820-96c7-2bbcfc8dad67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.620426] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 801.620426] env[69475]: value = "task-3508070" [ 801.620426] env[69475]: _type = "Task" [ 801.620426] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.621904] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdd8036-b952-4966-87bf-47908e9c4f89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.638628] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Suspending the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 801.638976] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508070, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.640184] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-11e97f3c-a0e7-4be9-a43f-8ea512d0e9c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.649041] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508068, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.659088] env[69475]: DEBUG oslo_vmware.api [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 801.659088] env[69475]: value = "task-3508071" [ 801.659088] env[69475]: _type = "Task" [ 801.659088] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.665223] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508065, 'name': CloneVM_Task, 'duration_secs': 1.533887} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.666303] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Created linked-clone VM from snapshot [ 801.667431] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d6a630-89dd-455b-836d-afb674f36d15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.673896] env[69475]: DEBUG oslo_vmware.api [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508071, 'name': SuspendVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.681416] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Uploading image 7b1de553-c880-4e56-a4a1-1df09903c04b {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 801.693745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.715328] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 801.715328] env[69475]: value = "vm-700981" [ 801.715328] env[69475]: _type = "VirtualMachine" [ 801.715328] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 801.715328] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-85f8e29a-9023-472f-9a2a-15450b50dd0e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.726157] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease: (returnval){ [ 801.726157] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52697534-da46-6352-1bc2-902af7169f41" [ 801.726157] env[69475]: _type = "HttpNfcLease" [ 801.726157] env[69475]: } obtained for exporting VM: (result){ [ 801.726157] env[69475]: value = "vm-700981" [ 801.726157] env[69475]: _type = "VirtualMachine" [ 801.726157] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 801.726157] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the lease: (returnval){ [ 801.726157] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52697534-da46-6352-1bc2-902af7169f41" [ 801.726157] env[69475]: _type = "HttpNfcLease" [ 801.726157] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 801.735043] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 801.735043] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52697534-da46-6352-1bc2-902af7169f41" [ 801.735043] env[69475]: _type = "HttpNfcLease" [ 801.735043] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 801.815940] env[69475]: DEBUG nova.scheduler.client.report [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 801.926361] env[69475]: INFO nova.compute.manager [-] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Took 1.50 seconds to deallocate network for instance. [ 801.957196] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 801.973038] env[69475]: DEBUG nova.network.neutron [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updated VIF entry in instance network info cache for port 858c37b6-4824-46d3-9dff-c0e0d91c47b5. 
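The "Inventory has not changed for provider dd221100-..." entry above reports per-resource-class inventory. What the scheduler effectively has to work with is the capacity after reserves and overcommit, usually computed as (total - reserved) * allocation_ratio; with the logged numbers that works out as below (a quick illustration, not Placement code):

```python
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: schedulable capacity {capacity:g}")

# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
```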
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 801.973038] env[69475]: DEBUG nova.network.neutron [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.064964] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508069, 'name': CreateVM_Task, 'duration_secs': 0.393374} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.065225] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 802.066198] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.066355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.066683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 802.066944] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9634ef6-bfce-492e-8454-ff2c14762adf {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.073618] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 802.073618] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f86ca-a35d-92cc-2750-bd1f2f7e3fda" [ 802.073618] env[69475]: _type = "Task" [ 802.073618] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.083542] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f86ca-a35d-92cc-2750-bd1f2f7e3fda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.131627] env[69475]: DEBUG oslo_vmware.api [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508070, 'name': PowerOnVM_Task, 'duration_secs': 0.434123} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.134998] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 802.135329] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bd2c45e3-cbf5-4842-b9f8-cb393eaad69b tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance '4b3b53d1-82bf-40e7-9988-af7b51e9883a' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 802.143621] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.557807} completed successfully. 
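The copy above moves a VMDK between bracketed datastore paths of the form "[datastore2] dir/file.vmdk". The driver has its own datastore-path helpers for this, but splitting one is straightforward; a small illustrative function:

```python
def split_datastore_path(datastore_path):
    """Split '[datastore2] dir/file.vmdk' into (datastore, relative path)."""
    datastore, _, rel_path = datastore_path.partition("]")
    return datastore.lstrip("["), rel_path.strip()


src = ("[datastore2] devstack-image-cache_base/"
       "afa9d32c-9f39-44fb-bf3b-50d35842a59f/"
       "afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk")
print(split_datastore_path(src))
# ('datastore2', 'devstack-image-cache_base/afa9d32c-.../afa9d32c-....vmdk')
```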
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.143863] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2e7066ca-162e-4465-a9c1-5422510e4c0f/2e7066ca-162e-4465-a9c1-5422510e4c0f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 802.145271] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 802.145908] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a47b76f-9d3c-4b65-bd24-cf555ed2455f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.154512] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 802.154512] env[69475]: value = "task-3508073" [ 802.154512] env[69475]: _type = "Task" [ 802.154512] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.176192] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508073, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.179621] env[69475]: DEBUG oslo_vmware.api [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508071, 'name': SuspendVM_Task} progress is 50%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.234952] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 802.234952] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52697534-da46-6352-1bc2-902af7169f41" [ 802.234952] env[69475]: _type = "HttpNfcLease" [ 802.234952] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 802.235352] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 802.235352] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52697534-da46-6352-1bc2-902af7169f41" [ 802.235352] env[69475]: _type = "HttpNfcLease" [ 802.235352] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 802.236198] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef89cf79-71c0-4c96-a29e-bdcdc9b4cf24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.244180] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 802.244406] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 802.321253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.321803] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.325422] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.248s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.325655] env[69475]: DEBUG nova.objects.instance [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lazy-loading 'resources' on Instance uuid 3e332e28-5db5-4f04-8a47-95406da16e21 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.375867] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f9d61c79-3a5d-429d-8c0b-33b45dcf72a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.435184] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.477295] env[69475]: DEBUG oslo_concurrency.lockutils [req-770411a2-be93-4e72-ae3c-64c20c45ed11 req-eedfa000-0976-47ea-92b1-2b03a4fab6e5 service nova] Releasing lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.483862] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.590291] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f86ca-a35d-92cc-2750-bd1f2f7e3fda, 'name': SearchDatastore_Task, 'duration_secs': 0.071839} completed successfully. 
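The Acquiring/Acquired/Releasing lock lines around the image cache come from oslo.concurrency's lockutils, which serializes work on a given cached image so only one request checks or populates it at a time. A minimal sketch of the same pattern; the lock name is copied from the log, while fetch_cached_image and copy_virtual_disk are hypothetical placeholders rather than the driver's methods:

```python
from oslo_concurrency import lockutils

CACHE_LOCK = ("[datastore2] devstack-image-cache_base/"
              "afa9d32c-9f39-44fb-bf3b-50d35842a59f")


def fetch_cached_image(copy_virtual_disk):
    # Only one caller at a time may inspect or populate this cache entry,
    # mirroring the lock acquire/release pairs in the log.
    with lockutils.lock(CACHE_LOCK):
        # Search the datastore here and copy only if the VMDK is missing.
        copy_virtual_disk()
```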
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.590291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.590291] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.590291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.590291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.590291] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.590291] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c23d900-b602-4143-a536-b3100d37a935 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.605760] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.605760] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.608724] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4c1e976-173a-4ace-a7c9-ab0e8675ff3e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.615046] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 802.615046] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261d0eb-201d-3508-6d33-545d86a013b4" [ 802.615046] env[69475]: _type = "Task" [ 802.615046] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.625902] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261d0eb-201d-3508-6d33-545d86a013b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.674763] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508073, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241782} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.676227] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 802.677032] env[69475]: DEBUG oslo_vmware.api [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508071, 'name': SuspendVM_Task} progress is 50%. 
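The "Extending root virtual disk to 1048576" entries in this stretch give the target size in KB, which appears to match a 1 GiB root disk (the m1.nano flavor logged further down has root_gb=1). A one-line check of that arithmetic, offered as an interpretation of the number rather than a statement of the driver's internals:

```python
def root_disk_kb(root_gb):
    # 1 GiB root disk expressed in KB: 1 * 1024 * 1024.
    return root_gb * 1024 * 1024


print(root_disk_kb(1))  # 1048576, matching the extend-task size in the log
```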
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.677606] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca63ab7e-e251-4457-b992-474f760e96d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.684277] env[69475]: DEBUG nova.compute.manager [req-eed49431-409c-4632-980c-d20228406d2c req-5f653c8c-d489-402d-92f0-e17340e5f8d3 service nova] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Received event network-vif-deleted-cc6e2aa6-9a4f-46b5-9fbe-04f7e4551c5c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.705117] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] 2e7066ca-162e-4465-a9c1-5422510e4c0f/2e7066ca-162e-4465-a9c1-5422510e4c0f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 802.706955] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77c52d73-e949-4bc5-a0c3-17a600d31e42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.731340] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 802.731340] env[69475]: value = "task-3508074" [ 802.731340] env[69475]: _type = "Task" [ 802.731340] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.740307] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508074, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.829450] env[69475]: DEBUG nova.compute.utils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 802.830983] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 802.831219] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 802.890204] env[69475]: DEBUG nova.policy [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '32d8efff6f9e4846b49febaf379f07fb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f1784f9c01de49c494bc44e0272c02cf', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 803.132589] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261d0eb-201d-3508-6d33-545d86a013b4, 'name': SearchDatastore_Task, 'duration_secs': 0.036402} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.133690] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a8e7a92-131c-4bb9-a4f8-f1e1c26d4f55 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.145370] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 803.145370] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5285e276-265a-6190-647d-d497ed9af3f6" [ 803.145370] env[69475]: _type = "Task" [ 803.145370] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.166024] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5285e276-265a-6190-647d-d497ed9af3f6, 'name': SearchDatastore_Task, 'duration_secs': 0.017667} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.176019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.176019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.176019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c43fc4a2-c743-491c-9239-5d107964f94b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.186792] env[69475]: DEBUG oslo_vmware.api [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508071, 'name': SuspendVM_Task, 'duration_secs': 1.254369} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.186792] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 803.186792] env[69475]: value = "task-3508075" [ 803.186792] env[69475]: _type = "Task" [ 803.186792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.186792] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Suspended the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 803.187260] env[69475]: DEBUG nova.compute.manager [None req-39609b0d-3a06-4afe-a61f-6df61a414ecd tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.188532] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f77d56b-41bc-42d3-bdb8-df407ace3f2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.206307] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508075, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.249978] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508074, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.337426] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 803.474501] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Successfully created port: 05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.497411] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abc8747-0b7c-4138-99d8-30236486687e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.508860] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed3d998-de31-46f2-9af5-cf73f8cdf71d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.548569] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-562082bd-7251-402a-8976-9283247ac881 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.556979] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21986fd-1983-40c2-a0e3-62e19c0b924e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.574494] env[69475]: DEBUG nova.compute.provider_tree [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.704266] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508075, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.741659] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508074, 'name': ReconfigVM_Task, 'duration_secs': 0.571161} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.742089] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Reconfigured VM instance instance-00000034 to attach disk [datastore2] 2e7066ca-162e-4465-a9c1-5422510e4c0f/2e7066ca-162e-4465-a9c1-5422510e4c0f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 803.742840] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-24d21714-f5f8-4731-8ceb-7ad2c4a54821 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.752407] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 803.752407] env[69475]: value = "task-3508076" [ 803.752407] env[69475]: _type = "Task" [ 803.752407] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.765715] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508076, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.078577] env[69475]: DEBUG nova.scheduler.client.report [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 804.203430] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508075, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624205} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.203971] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.204138] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.204480] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4786a9d-b300-4015-a3a8-ce3edc23ae33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.211271] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 804.211271] env[69475]: value = "task-3508077" [ 804.211271] env[69475]: _type = "Task" [ 804.211271] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.220853] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508077, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.265348] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508076, 'name': Rename_Task, 'duration_secs': 0.317696} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.268918] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 804.268918] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97fe6157-2ce5-4e8d-a585-c90a7e201b67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.273689] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 804.273689] env[69475]: value = "task-3508078" [ 804.273689] env[69475]: _type = "Task" [ 804.273689] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.292186] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508078, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.350929] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 804.585394] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.259s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.587895] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.845s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.589371] env[69475]: INFO nova.compute.claims [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.623656] env[69475]: INFO nova.scheduler.client.report [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted allocations for instance 3e332e28-5db5-4f04-8a47-95406da16e21 [ 804.678573] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 804.679236] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 
tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 804.679496] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 804.679755] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 804.680074] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 804.680650] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 804.680650] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 804.681226] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 804.681923] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 804.681923] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 804.681923] env[69475]: DEBUG nova.virt.hardware [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 804.684642] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477f864b-9d66-4309-a907-fbbeeaa89ea2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.694363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3afd52d0-5462-4889-a1dc-9f9793ef6992 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.724411] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508077, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074424} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.724781] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.725656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db476df-6df9-4ed2-91e1-3855d7ec1976 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.754486] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.754954] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a435ae4e-5210-4e83-a339-73178d417a80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.780034] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 804.780034] env[69475]: value = "task-3508079" [ 804.780034] env[69475]: _type = "Task" [ 804.780034] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.787990] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508078, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.793809] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508079, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.132431] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a01600c-c1dd-41ee-9b11-9ef7a1ec4075 tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "3e332e28-5db5-4f04-8a47-95406da16e21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.619s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.166325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.166325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.166604] env[69475]: DEBUG nova.compute.manager [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Going to confirm migration 1 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 805.287462] env[69475]: DEBUG oslo_vmware.api [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508078, 'name': PowerOnVM_Task, 'duration_secs': 0.88829} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.288174] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 805.288428] env[69475]: INFO nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Took 10.11 seconds to spawn the instance on the hypervisor. 
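Note on the recurring "Waiting for the task ... to complete" / "progress is N%." / "'duration_secs': ...} completed successfully." entries above: they are emitted while oslo.vmware's wait_for_task polls a vCenter task (the log cites wait_for_task at api.py:397 and _poll_task at api.py:434). The snippet below is only a minimal, self-contained sketch of that poll-until-done pattern, written to mimic the message shapes seen here; FakeTask, the fixed poll interval, and the print-based logging are illustrative assumptions, not oslo.vmware's actual implementation.

import time

class FakeTask:
    """Stand-in for a vCenter task handle; progress ticks up on each poll."""
    def __init__(self, name):
        self.name = name
        self._progress = 0

    def poll(self):
        """Return (state, progress); 'success' once progress reaches 100."""
        self._progress = min(self._progress + 40, 100)
        state = "success" if self._progress == 100 else "running"
        return state, self._progress


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, printing progress like the log entries above."""
    print(f"Waiting for the task: {task.name} to complete.")
    start = time.monotonic()
    while True:
        state, progress = task.poll()
        if state == "running":
            print(f"Task: {task.name} progress is {progress}%.")
            time.sleep(interval)
            continue
        duration = time.monotonic() - start
        if state == "success":
            print(f"Task: {task.name}, 'duration_secs': {duration:.6f} completed successfully.")
            return
        raise RuntimeError(f"Task {task.name} failed after {duration:.2f}s")


if __name__ == "__main__":
    wait_for_task(FakeTask("PowerOnVM_Task"))

Running this prints one "Waiting for the task", a couple of progress lines, and a completion line, which is the same three-phase shape the ReconfigVM_Task / PowerOnVM_Task entries follow in this log.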
[ 805.288589] env[69475]: DEBUG nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.289782] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9479d4-51f7-4f4c-9361-10564bdb991b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.296100] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508079, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.501987] env[69475]: DEBUG nova.compute.manager [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Received event network-vif-plugged-05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 805.502232] env[69475]: DEBUG oslo_concurrency.lockutils [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] Acquiring lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.502446] env[69475]: DEBUG oslo_concurrency.lockutils [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.502609] env[69475]: DEBUG oslo_concurrency.lockutils [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 805.502823] env[69475]: DEBUG nova.compute.manager [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] No waiting events found dispatching network-vif-plugged-05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 805.504892] env[69475]: WARNING nova.compute.manager [req-4cf10595-ea3b-4a56-a0b5-1839f73c6cb7 req-067a61dc-45eb-4d8f-ae70-4fb363c374a0 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Received unexpected event network-vif-plugged-05ec4e25-3c6d-4d4b-a353-a749c7ee1242 for instance with vm_state building and task_state spawning. 
[ 805.645084] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Successfully updated port: 05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.714381] env[69475]: DEBUG nova.compute.manager [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.716294] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbfea21e-234a-4614-a400-537aa5e55fe9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.780141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.780141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.780141] env[69475]: DEBUG nova.network.neutron [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.780141] env[69475]: DEBUG nova.objects.instance [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lazy-loading 'info_cache' on Instance uuid 4b3b53d1-82bf-40e7-9988-af7b51e9883a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 805.793577] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508079, 'name': ReconfigVM_Task, 'duration_secs': 0.560398} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.794070] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.794862] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5bf5a1cf-7cd9-4f8d-90a1-0ea62066c9d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.813366] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 805.813366] env[69475]: value = "task-3508080" [ 805.813366] env[69475]: _type = "Task" [ 805.813366] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.818910] env[69475]: INFO nova.compute.manager [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Took 39.46 seconds to build instance. [ 805.832041] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508080, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.148099] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.148359] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.148437] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 806.229021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f8014d-bdd6-4170-90f1-d6fbf8358712 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.234402] env[69475]: INFO nova.compute.manager [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] instance snapshotting [ 806.234832] env[69475]: WARNING nova.compute.manager [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 806.245020] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1543b390-2ae2-414d-9894-dc2f0adfde96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.245907] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6c7c73-53be-45a2-9db2-eaa39ff442a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.295432] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f530c9f3-c474-44f9-80f7-26bf54816734 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.304849] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a136ece6-d0c0-461f-ae1d-499d7aae6324 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.315186] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b11b24ca-10c9-4e5a-9b8c-9514d8643fd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.326648] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51fe4aaa-e615-45ff-a4cb-2789ddd4ce7a tempest-VolumesAdminNegativeTest-1855579597 
tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.581s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.338293] env[69475]: DEBUG nova.compute.provider_tree [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.344901] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508080, 'name': Rename_Task, 'duration_secs': 0.202886} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.345153] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 806.345435] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bd18d3e-246f-4bb6-baa3-a93309e67960 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.355476] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 806.355476] env[69475]: value = "task-3508081" [ 806.355476] env[69475]: _type = "Task" [ 806.355476] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.367413] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508081, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.707070] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.825570] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 806.825967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-7016bb30-b4c4-44bc-acd2-df928e94cec0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.834489] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 806.834489] env[69475]: value = "task-3508082" [ 806.834489] env[69475]: _type = "Task" [ 806.834489] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.839306] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 806.854151] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508082, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.875224] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508081, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.894026] env[69475]: DEBUG nova.scheduler.client.report [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 806.894744] env[69475]: DEBUG nova.compute.provider_tree [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 77 to 78 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 806.895128] env[69475]: DEBUG nova.compute.provider_tree [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 806.907702] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "712e93b6-e797-4b9f-b39b-33373cede403" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.907702] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.907836] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "712e93b6-e797-4b9f-b39b-33373cede403-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.908100] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b 
tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.911018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.913235] env[69475]: INFO nova.compute.manager [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Terminating instance [ 806.941790] env[69475]: DEBUG nova.network.neutron [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Updating instance_info_cache with network_info: [{"id": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "address": "fa:16:3e:3e:78:bf", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ec4e25-3c", "ovs_interfaceid": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.094273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.094627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.094917] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.095191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.095444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.100885] env[69475]: INFO nova.compute.manager [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Terminating instance [ 807.174182] env[69475]: DEBUG nova.network.neutron [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [{"id": "fd636137-6583-4c7a-937a-701561e4141a", "address": "fa:16:3e:82:54:fb", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.222", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd636137-65", "ovs_interfaceid": "fd636137-6583-4c7a-937a-701561e4141a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.347931] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508082, 'name': CreateSnapshot_Task} 
progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.367878] env[69475]: DEBUG oslo_vmware.api [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508081, 'name': PowerOnVM_Task, 'duration_secs': 0.781325} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.368832] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.369197] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.369456] env[69475]: INFO nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Took 9.43 seconds to spawn the instance on the hypervisor. [ 807.369720] env[69475]: DEBUG nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.370717] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67aeadf-cc20-44c9-85d6-af282e10d0a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.405952] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.818s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 807.406761] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 807.409709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.244s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.414022] env[69475]: INFO nova.compute.claims [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 807.418931] env[69475]: DEBUG nova.compute.manager [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.419631] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.420080] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468d9b13-7045-4f41-8bae-86718e915f49 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.428431] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.428788] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c6923e2-febb-4849-a738-7c0d4ba67736 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.437395] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 807.437395] env[69475]: value = "task-3508083" [ 807.437395] env[69475]: _type = "Task" [ 807.437395] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.447538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.448063] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Instance network_info: |[{"id": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "address": "fa:16:3e:3e:78:bf", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ec4e25-3c", "ovs_interfaceid": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.453195] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3e:78:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05ec4e25-3c6d-4d4b-a353-a749c7ee1242', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.460797] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.460992] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508083, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.463195] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.463195] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-97007ff5-8ded-4beb-88a1-c6dab5b74ccb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.489049] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.489049] env[69475]: value = "task-3508084" [ 807.489049] env[69475]: _type = "Task" [ 807.489049] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.499230] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508084, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.605317] env[69475]: DEBUG nova.compute.manager [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 807.605693] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 807.607225] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eedecebd-dbd1-48a4-849a-c3a3c0124c7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.619886] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 807.620213] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6980f529-cef2-47a9-9349-4b2b37561122 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.630041] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 807.630041] env[69475]: value = "task-3508085" [ 807.630041] env[69475]: _type = "Task" [ 807.630041] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.642684] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508085, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.677656] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-4b3b53d1-82bf-40e7-9988-af7b51e9883a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.678052] env[69475]: DEBUG nova.objects.instance [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lazy-loading 'migration_context' on Instance uuid 4b3b53d1-82bf-40e7-9988-af7b51e9883a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 807.722412] env[69475]: DEBUG nova.compute.manager [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Received event network-changed-05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 807.722412] env[69475]: DEBUG nova.compute.manager [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Refreshing instance network info cache due to event network-changed-05ec4e25-3c6d-4d4b-a353-a749c7ee1242. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 807.722686] env[69475]: DEBUG oslo_concurrency.lockutils [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] Acquiring lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.722899] env[69475]: DEBUG oslo_concurrency.lockutils [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] Acquired lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.723107] env[69475]: DEBUG nova.network.neutron [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Refreshing network info cache for port 05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.849345] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508082, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.890314] env[69475]: INFO nova.compute.manager [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Took 37.73 seconds to build instance. 
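Note on the lock entries in this section (e.g. Lock "compute_resources" acquired ... :: waited 28.845s, then "released" ... :: held 2.818s): they come from oslo.concurrency's lockutils wrapper timing how long a caller waited for a named lock and how long it held it. The sketch below is a simplified, process-local illustration of producing that waited/held reporting with a context manager; the _locks registry, timed_lock helper, and print-based output are assumptions for illustration and do not reproduce lockutils' external/fair-lock features.

import threading
import time
from contextlib import contextmanager

# One process-local lock per name, mirroring the named-lock idea in the log.
_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, owner):
    """Acquire the named lock and report waited/held times like the log lines."""
    lock = _get_lock(name)
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{owner}"')
    lock.acquire()
    t1 = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {t1 - t0:.3f}s')
    try:
        yield
    finally:
        lock.release()
        t2 = time.monotonic()
        print(f'Lock "{name}" "released" by "{owner}" :: held {t2 - t1:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.1)  # simulate work done while holding the lock

Long "waited" values like the 28.845s seen above indicate contention on the shared "compute_resources" lock rather than slowness in the critical section itself, which is what the short "held" times alongside them suggest.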
[ 807.916675] env[69475]: DEBUG nova.compute.utils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 807.923188] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 807.923188] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 807.952747] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508083, 'name': PowerOffVM_Task, 'duration_secs': 0.300948} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.953054] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 807.953285] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 807.953579] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b4933663-cdc7-489d-b3a3-8d8b86675193 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.977704] env[69475]: DEBUG nova.policy [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 808.001032] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508084, 'name': CreateVM_Task, 'duration_secs': 0.397919} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.001032] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 808.003825] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.003825] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.003825] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 808.003825] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7231ab0-5841-44ec-81db-a8947f69c424 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.011951] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 808.011951] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd0e14-1677-e7a2-72bf-0265139b036d" [ 808.011951] env[69475]: _type = "Task" [ 808.011951] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.019980] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.020391] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.020697] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleting the datastore file [datastore1] 712e93b6-e797-4b9f-b39b-33373cede403 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.022256] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-353cafed-8c38-48be-8423-a5db877edd7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.033545] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd0e14-1677-e7a2-72bf-0265139b036d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.038451] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 808.038451] env[69475]: value = "task-3508087" [ 808.038451] env[69475]: _type = "Task" [ 808.038451] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.051261] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508087, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.100137] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.100554] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.142864] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508085, 'name': PowerOffVM_Task, 'duration_secs': 0.233926} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.143295] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 808.143810] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 808.144200] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e6ab3dc-cb2e-4bf4-a540-f295416a7056 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.181443] env[69475]: DEBUG nova.objects.base [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Object Instance<4b3b53d1-82bf-40e7-9988-af7b51e9883a> lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 808.183094] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7496b7ab-c27c-409a-a661-c032fb4b9b91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.207303] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10d2e905-27b0-451a-bb82-7e727baf4e2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.214746] env[69475]: DEBUG oslo_vmware.api [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 
tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 808.214746] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dbbc9-ec4b-f964-ff3f-f2bec65d8005" [ 808.214746] env[69475]: _type = "Task" [ 808.214746] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.219841] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 808.219841] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 808.219841] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleting the datastore file [datastore1] daef2117-0d9f-4c9e-99e7-1e8a65aa1f22 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 808.219841] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2aec4b7-9f26-4479-a06f-2a18934a8a5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.224219] env[69475]: DEBUG oslo_vmware.api [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dbbc9-ec4b-f964-ff3f-f2bec65d8005, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.229254] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for the task: (returnval){ [ 808.229254] env[69475]: value = "task-3508089" [ 808.229254] env[69475]: _type = "Task" [ 808.229254] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.239710] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508089, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.347643] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508082, 'name': CreateSnapshot_Task, 'duration_secs': 1.072253} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.348128] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 808.348854] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97ef589-041d-4279-83c4-01d1771bd05b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.394459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad38bf93-0d86-4f03-be45-42cc7671c2cc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.884s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.422518] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 808.526148] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd0e14-1677-e7a2-72bf-0265139b036d, 'name': SearchDatastore_Task, 'duration_secs': 0.014997} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.535023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.535023] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.535023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.535023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.535023] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.535332] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-887cc190-b22e-4e27-8cb1-91d9d650505e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.552232] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.552232] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.552955] env[69475]: DEBUG oslo_vmware.api [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508087, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.327761} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.553227] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8863956-4956-41bc-9875-9619ba7bff35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.558749] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.558749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.558749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.558749] env[69475]: INFO nova.compute.manager [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Took 1.14 seconds to destroy the instance on the hypervisor. [ 808.558749] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.562193] env[69475]: DEBUG nova.compute.manager [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.562300] env[69475]: DEBUG nova.network.neutron [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.564925] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Successfully created port: 9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.575020] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 808.575020] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5eb2d-e200-4d1f-5ebb-6111c2b9636b" [ 808.575020] env[69475]: _type = "Task" [ 808.575020] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.581449] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5eb2d-e200-4d1f-5ebb-6111c2b9636b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.727279] env[69475]: DEBUG oslo_vmware.api [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dbbc9-ec4b-f964-ff3f-f2bec65d8005, 'name': SearchDatastore_Task, 'duration_secs': 0.023415} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.727563] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.738230] env[69475]: DEBUG nova.network.neutron [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Updated VIF entry in instance network info cache for port 05ec4e25-3c6d-4d4b-a353-a749c7ee1242. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 808.738568] env[69475]: DEBUG nova.network.neutron [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Updating instance_info_cache with network_info: [{"id": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "address": "fa:16:3e:3e:78:bf", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ec4e25-3c", "ovs_interfaceid": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 808.742856] env[69475]: DEBUG oslo_vmware.api [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Task: {'id': task-3508089, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.375336} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.743376] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 808.743569] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 808.743747] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 808.743917] env[69475]: INFO nova.compute.manager [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Took 1.14 seconds to destroy the instance on the hypervisor. [ 808.744198] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 808.744414] env[69475]: DEBUG nova.compute.manager [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 808.744497] env[69475]: DEBUG nova.network.neutron [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 808.753886] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.754166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.754385] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.754566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.754731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.758347] env[69475]: INFO nova.compute.manager [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Terminating instance [ 808.873720] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 808.883047] 
env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-4f4e649d-1b7f-4050-b95b-8b90df10af72 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.896711] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 808.896711] env[69475]: value = "task-3508090" [ 808.896711] env[69475]: _type = "Task" [ 808.896711] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.900468] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 808.912435] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508090, 'name': CloneVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.086473] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5eb2d-e200-4d1f-5ebb-6111c2b9636b, 'name': SearchDatastore_Task, 'duration_secs': 0.021589} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.087290] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bee43368-bee4-41f7-ada9-93690600db74 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.095994] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 809.095994] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d59840-8749-4cc1-770c-80e48685cf67" [ 809.095994] env[69475]: _type = "Task" [ 809.095994] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.103325] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d59840-8749-4cc1-770c-80e48685cf67, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.107091] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fcf6d25-2e4d-4b7f-8164-57947440f7a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.114205] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea0bc6a-dc41-4a1d-8bf7-4a9f36ec9d36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.149836] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bed882f-398e-46b1-a6d6-7a6a5a7d25c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.157526] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd459c13-55f6-41a9-95b6-cb1041151701 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.172870] env[69475]: DEBUG nova.compute.provider_tree [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 809.244264] env[69475]: DEBUG oslo_concurrency.lockutils [req-d50d6c73-14e0-4a1f-9883-b964bcd6c6a5 req-35f301ee-e542-4951-85eb-bae7beed10b9 service nova] Releasing lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.268308] env[69475]: DEBUG nova.compute.manager [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 809.268308] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.269555] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af8ea996-be94-43e6-a367-4fe9c471799e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.277997] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 809.279398] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-45435681-33c5-4074-a6a5-ab2700763f33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.283582] env[69475]: DEBUG nova.compute.manager [req-fdd84f74-79ce-4c28-b41d-a5fc4a7cd070 req-2d1f90ed-e3ae-4105-8714-b25b71a0ae49 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Received event network-vif-deleted-792cf213-fbce-47e9-8e02-2c4aa6a06738 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.283582] env[69475]: INFO nova.compute.manager [req-fdd84f74-79ce-4c28-b41d-a5fc4a7cd070 req-2d1f90ed-e3ae-4105-8714-b25b71a0ae49 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Neutron deleted interface 792cf213-fbce-47e9-8e02-2c4aa6a06738; detaching it from the instance and deleting it from the info cache [ 809.283582] env[69475]: DEBUG nova.network.neutron [req-fdd84f74-79ce-4c28-b41d-a5fc4a7cd070 req-2d1f90ed-e3ae-4105-8714-b25b71a0ae49 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.289573] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 809.289573] env[69475]: value = "task-3508091" [ 809.289573] env[69475]: _type = "Task" [ 809.289573] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.298940] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508091, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.388778] env[69475]: DEBUG nova.network.neutron [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.411923] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508090, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.427785] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.437390] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 809.468565] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.468885] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 809.469138] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 809.469416] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 809.469641] env[69475]: DEBUG nova.virt.hardware [None 
req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 809.469857] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 809.470210] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 809.470373] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 809.470650] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 809.470911] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 809.471147] env[69475]: DEBUG nova.virt.hardware [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 809.472222] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80c02def-5b6d-482d-ad9f-a43ce279e77e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.483251] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1292cbd9-b677-4f86-8409-6ea774c319d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.606265] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d59840-8749-4cc1-770c-80e48685cf67, 'name': SearchDatastore_Task, 'duration_secs': 0.016201} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.606469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.606705] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2b0cc71c-862e-4eb0-afc4-b2125003b087/2b0cc71c-862e-4eb0-afc4-b2125003b087.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.607026] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-791fcef7-e438-445c-8e81-cdc6efc8c7b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.613220] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 809.613220] env[69475]: value = "task-3508092" [ 809.613220] env[69475]: _type = "Task" [ 809.613220] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.621475] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508092, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.675824] env[69475]: DEBUG nova.scheduler.client.report [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 809.785289] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-175ba02d-3173-46db-a785-a0d5a8734920 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.798481] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686c0b2b-e859-44c5-98ed-6e657671ed5f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.816036] env[69475]: DEBUG nova.network.neutron [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.817242] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508091, 'name': PowerOffVM_Task, 'duration_secs': 0.251131} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.820913] env[69475]: DEBUG nova.compute.manager [req-e4d224de-44fd-45a7-9368-4a0448ff0388 req-171537ce-5dbd-4322-85dd-a6a88ca25501 service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Received event network-vif-deleted-74bc91c0-20e1-4de1-8433-333a88443441 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.820913] env[69475]: INFO nova.compute.manager [req-e4d224de-44fd-45a7-9368-4a0448ff0388 req-171537ce-5dbd-4322-85dd-a6a88ca25501 service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Neutron deleted interface 74bc91c0-20e1-4de1-8433-333a88443441; detaching it from the instance and deleting it from the info cache [ 809.820913] env[69475]: DEBUG nova.network.neutron [req-e4d224de-44fd-45a7-9368-4a0448ff0388 req-171537ce-5dbd-4322-85dd-a6a88ca25501 service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.824748] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 809.824902] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 809.827028] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e9733d9-a0f1-42e6-96f7-cca0d61f868a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.850066] env[69475]: DEBUG nova.compute.manager [req-fdd84f74-79ce-4c28-b41d-a5fc4a7cd070 req-2d1f90ed-e3ae-4105-8714-b25b71a0ae49 service nova] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Detach interface failed, port_id=792cf213-fbce-47e9-8e02-2c4aa6a06738, reason: Instance 712e93b6-e797-4b9f-b39b-33373cede403 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 809.891275] env[69475]: INFO nova.compute.manager [-] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Took 1.33 seconds to deallocate network for instance. [ 809.909793] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508090, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.926729] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 809.927075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 809.927418] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleting the datastore file [datastore2] 2e7066ca-162e-4465-a9c1-5422510e4c0f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 809.927644] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ff0867ac-2dcf-4a91-be22-8afa228f2e78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.934623] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 809.934623] env[69475]: value = "task-3508094" [ 809.934623] env[69475]: _type = "Task" [ 809.934623] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.945173] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508094, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.123899] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508092, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.181509] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.772s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 810.182232] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 810.185106] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.795s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 810.186867] env[69475]: INFO nova.compute.claims [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 810.323365] env[69475]: INFO nova.compute.manager [-] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Took 1.58 seconds to deallocate network for instance. [ 810.327899] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01f3e28d-2ae5-4a5d-b967-826036c716f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.339238] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8e23fb-e11d-4620-a115-8a097f8bec93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.386131] env[69475]: DEBUG nova.compute.manager [req-e4d224de-44fd-45a7-9368-4a0448ff0388 req-171537ce-5dbd-4322-85dd-a6a88ca25501 service nova] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Detach interface failed, port_id=74bc91c0-20e1-4de1-8433-333a88443441, reason: Instance daef2117-0d9f-4c9e-99e7-1e8a65aa1f22 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 810.404515] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.412379] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508090, 'name': CloneVM_Task, 'duration_secs': 1.366149} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.412849] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Created linked-clone VM from snapshot [ 810.413951] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f9a093-a5e5-4807-8be5-025e71cbcaa1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.423868] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Uploading image e248cf7a-f2b1-4f73-b442-4f4396e08e5b {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 810.452994] env[69475]: DEBUG oslo_vmware.api [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508094, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.435218} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.457594] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 810.457842] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 810.458104] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 810.458331] env[69475]: INFO nova.compute.manager [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Took 1.19 seconds to destroy the instance on the hypervisor. [ 810.458600] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 810.459052] env[69475]: DEBUG nova.compute.manager [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 810.459155] env[69475]: DEBUG nova.network.neutron [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 810.465259] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 810.465259] env[69475]: value = "vm-700985" [ 810.465259] env[69475]: _type = "VirtualMachine" [ 810.465259] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 810.465567] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-79a0fbe6-2297-47af-bc4b-03630657eef8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.473209] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease: (returnval){ [ 810.473209] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e0dbb-63eb-cbfc-8fc4-1a67d3ed09ad" [ 810.473209] env[69475]: _type = "HttpNfcLease" [ 810.473209] env[69475]: } obtained for exporting VM: (result){ [ 810.473209] env[69475]: value = "vm-700985" [ 810.473209] env[69475]: _type = "VirtualMachine" [ 810.473209] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 810.474488] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the lease: (returnval){ [ 810.474488] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e0dbb-63eb-cbfc-8fc4-1a67d3ed09ad" [ 810.474488] env[69475]: _type = "HttpNfcLease" [ 810.474488] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 810.481604] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 810.481604] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e0dbb-63eb-cbfc-8fc4-1a67d3ed09ad" [ 810.481604] env[69475]: _type = "HttpNfcLease" [ 810.481604] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 810.625761] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508092, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576378} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.626451] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2b0cc71c-862e-4eb0-afc4-b2125003b087/2b0cc71c-862e-4eb0-afc4-b2125003b087.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.626693] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.626834] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1824d7aa-0dba-448a-ad92-48231357c92c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.634207] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 810.634207] env[69475]: value = "task-3508096" [ 810.634207] env[69475]: _type = "Task" [ 810.634207] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.644573] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508096, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.691961] env[69475]: DEBUG nova.compute.utils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 810.696319] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 810.696595] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 810.745584] env[69475]: DEBUG nova.policy [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '52e0b5ed347744ec8a9a1c432c741814', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6fe52710b9d1461ea46698c9cf7bafb2', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 810.829610] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 810.983506] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 810.983506] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e0dbb-63eb-cbfc-8fc4-1a67d3ed09ad" [ 810.983506] env[69475]: _type = "HttpNfcLease" [ 810.983506] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 810.983900] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 810.983900] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e0dbb-63eb-cbfc-8fc4-1a67d3ed09ad" [ 810.983900] env[69475]: _type = "HttpNfcLease" [ 810.983900] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 810.984722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581eb24b-dc61-4ef3-9534-501e4d3d5e4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.998478] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk from lease info. 
{{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 811.001478] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 811.066416] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Successfully created port: 60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 811.130621] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d430a3a0-b59a-4471-8ac5-4d680535449e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.149823] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508096, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067728} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.150262] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 811.151819] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335f68bf-83cf-4c4b-9ed3-0106727b10fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.186743] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 2b0cc71c-862e-4eb0-afc4-b2125003b087/2b0cc71c-862e-4eb0-afc4-b2125003b087.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 811.191061] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a0ec029a-92a6-4ecb-9b00-026ec4248df9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.206181] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Successfully updated port: 9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 811.208210] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 
tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 811.232022] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 811.232022] env[69475]: value = "task-3508097" [ 811.232022] env[69475]: _type = "Task" [ 811.232022] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.244206] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508097, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.584615] env[69475]: DEBUG nova.compute.manager [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 811.725684] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.725875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 811.726218] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 811.727558] env[69475]: DEBUG nova.network.neutron [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.750263] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508097, 'name': ReconfigVM_Task, 'duration_secs': 0.443039} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.750697] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 2b0cc71c-862e-4eb0-afc4-b2125003b087/2b0cc71c-862e-4eb0-afc4-b2125003b087.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.751543] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-541a4a80-dd60-4589-bab7-a4049396c95c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.761501] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 811.761501] env[69475]: value = "task-3508098" [ 811.761501] env[69475]: _type = "Task" [ 811.761501] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.771584] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508098, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.875385] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c048105c-bf00-4ba5-8922-46bd11115449 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.883952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6224ee82-423b-40f0-b57f-d86c5a8f92ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.919411] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5f83ec-ff00-4b3d-97ee-9654beea58d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.924973] env[69475]: DEBUG nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Received event network-vif-plugged-9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.925326] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 811.925653] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Lock "e960f967-d693-4ea8-9390-8b0232941c58-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.925860] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Lock "e960f967-d693-4ea8-9390-8b0232941c58-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.926076] env[69475]: DEBUG nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] No waiting events found dispatching network-vif-plugged-9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 811.926308] env[69475]: WARNING nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Received unexpected event network-vif-plugged-9a80c54f-962d-4eb2-a41a-ff95882d56a3 for instance with vm_state building and task_state spawning. [ 811.926507] env[69475]: DEBUG nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Received event network-changed-9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 811.926684] env[69475]: DEBUG nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Refreshing instance network info cache due to event network-changed-9a80c54f-962d-4eb2-a41a-ff95882d56a3. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 811.926918] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Acquiring lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.933792] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1404fa42-b6f2-4a61-aeb2-3b43ae2a8887 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.949642] env[69475]: DEBUG nova.compute.provider_tree [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.103500] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.229741] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 812.234573] env[69475]: INFO nova.compute.manager [-] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Took 1.78 seconds to deallocate network for instance. [ 812.272448] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508098, 'name': Rename_Task, 'duration_secs': 0.229483} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.273922] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 812.275735] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.275735] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cef28b45-810c-4895-9fbc-6017437d96f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.283913] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 812.283913] env[69475]: value = "task-3508099" [ 812.283913] env[69475]: _type = "Task" [ 812.283913] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.294930] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508099, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.363803] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 812.364282] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 812.365634] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 812.366173] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:402}} [ 812.366370] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 812.366547] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 812.367294] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 812.367637] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 812.368859] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 812.368859] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 812.368859] env[69475]: DEBUG nova.virt.hardware [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 812.370570] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c2009c-ca13-439b-bfe5-edef80177f67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.383467] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 812.384906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a314e5-10ba-43c1-bc93-b1f1d0258142 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.389686] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ad2168-b66c-4d4e-8a72-18d93fff7c63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.406721] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 812.407007] env[69475]: ERROR oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk due to incomplete transfer. [ 812.410799] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5693a956-dc95-4f98-8625-6d9be87be532 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.419927] env[69475]: DEBUG oslo_vmware.rw_handles [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5292ca1a-da0b-745a-1673-f4430065fe5e/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 812.420166] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Uploaded image 7b1de553-c880-4e56-a4a1-1df09903c04b to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 812.422478] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 812.422833] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0d555f2b-4981-4443-a53d-612d99dfc320 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.430183] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 812.430183] env[69475]: value = "task-3508100" [ 812.430183] env[69475]: _type = "Task" [ 812.430183] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.440188] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508100, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.474998] env[69475]: ERROR nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [req-d24f1c36-daa0-47b8-a069-e21e77dc0733] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d24f1c36-daa0-47b8-a069-e21e77dc0733"}]} [ 812.496555] env[69475]: DEBUG nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 812.518097] env[69475]: DEBUG nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 812.518436] env[69475]: DEBUG nova.compute.provider_tree [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 812.535470] env[69475]: DEBUG nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] 
Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 812.538808] env[69475]: DEBUG nova.network.neutron [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updating instance_info_cache with network_info: [{"id": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "address": "fa:16:3e:9a:35:ca", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a80c54f-96", "ovs_interfaceid": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.559323] env[69475]: DEBUG nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 812.710572] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Successfully updated port: 60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 812.744944] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.807518] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508099, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.834600] env[69475]: DEBUG nova.compute.manager [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received event network-vif-plugged-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.835883] env[69475]: DEBUG oslo_concurrency.lockutils [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 812.835883] env[69475]: DEBUG oslo_concurrency.lockutils [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 812.835883] env[69475]: DEBUG oslo_concurrency.lockutils [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.835883] env[69475]: DEBUG nova.compute.manager [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] No waiting events found dispatching network-vif-plugged-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 812.835883] env[69475]: WARNING nova.compute.manager [req-4365faaf-c943-400a-a4ab-1c4b132be395 req-dba11935-3919-4e23-9d9b-d493ca0a788e service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received unexpected event network-vif-plugged-60c1a160-2445-460f-a1ab-ee86bd91a07c for instance with vm_state building and task_state spawning. [ 812.951428] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508100, 'name': Destroy_Task} progress is 33%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.042669] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.043225] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Instance network_info: |[{"id": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "address": "fa:16:3e:9a:35:ca", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a80c54f-96", "ovs_interfaceid": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 813.044384] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Acquired lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.044384] env[69475]: DEBUG nova.network.neutron [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Refreshing network info cache for port 9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 813.045317] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9a:35:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a80c54f-962d-4eb2-a41a-ff95882d56a3', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 813.053218] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 
tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 813.054555] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 813.054555] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3914dd84-d85e-4630-9785-88444bb06915 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.078170] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 813.078170] env[69475]: value = "task-3508101" [ 813.078170] env[69475]: _type = "Task" [ 813.078170] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.089240] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508101, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.152101] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6c90515-b4d0-4f98-b265-bcaa27101555 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.160375] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1c72b5-94a2-4678-851c-5e5dbe9639be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.194323] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4cde06-1b15-4cb0-ac1b-119190b6083c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.202234] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fd8bcdd-027a-4e7c-b5ed-2c0045199428 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.217948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.218097] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.218244] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.219504] env[69475]: DEBUG 
nova.compute.provider_tree [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 813.296365] env[69475]: DEBUG oslo_vmware.api [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508099, 'name': PowerOnVM_Task, 'duration_secs': 0.781568} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.296629] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.296885] env[69475]: INFO nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Took 8.95 seconds to spawn the instance on the hypervisor. [ 813.297083] env[69475]: DEBUG nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.297891] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbe4d397-ea9a-4610-830b-2b6b3df973a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.440148] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508100, 'name': Destroy_Task, 'duration_secs': 0.677901} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.440447] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Destroyed the VM [ 813.440724] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 813.441011] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-551be0d5-a290-4f5c-8d1e-fb35e6789dd8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.448424] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 813.448424] env[69475]: value = "task-3508102" [ 813.448424] env[69475]: _type = "Task" [ 813.448424] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.455828] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508102, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.588902] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508101, 'name': CreateVM_Task, 'duration_secs': 0.34076} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.591392] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.592349] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.592584] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.592997] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 813.593600] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cc3046a-30ac-4a5b-8b50-3373e133c819 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.599238] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 813.599238] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52227818-7eac-7564-2d81-e1c4ac1aa12f" [ 813.599238] env[69475]: _type = "Task" [ 813.599238] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.608667] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52227818-7eac-7564-2d81-e1c4ac1aa12f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.762308] env[69475]: DEBUG nova.scheduler.client.report [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 81 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 813.762616] env[69475]: DEBUG nova.compute.provider_tree [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 81 to 82 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 813.762856] env[69475]: DEBUG nova.compute.provider_tree [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 813.786324] env[69475]: DEBUG nova.network.neutron [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updated VIF entry in instance network info cache for port 9a80c54f-962d-4eb2-a41a-ff95882d56a3. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.786706] env[69475]: DEBUG nova.network.neutron [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updating instance_info_cache with network_info: [{"id": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "address": "fa:16:3e:9a:35:ca", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a80c54f-96", "ovs_interfaceid": "9a80c54f-962d-4eb2-a41a-ff95882d56a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.790499] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 813.814937] env[69475]: INFO nova.compute.manager [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Took 41.87 seconds to build instance. [ 813.958917] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508102, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.100799] env[69475]: DEBUG nova.network.neutron [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [{"id": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "address": "fa:16:3e:cf:c8:34", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c1a160-24", "ovs_interfaceid": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.117062] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52227818-7eac-7564-2d81-e1c4ac1aa12f, 'name': SearchDatastore_Task, 'duration_secs': 0.015287} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.117062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.117062] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 814.117062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.117062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.117062] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 814.117062] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37f6f164-e4dc-43c7-9466-0645f7fc8fea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.126719] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 814.126862] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 814.128158] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3378ec11-6a8b-4d42-a286-be6723280afc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.133549] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 814.133549] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f539c2-7eb5-449d-9e4a-ef78eb4e4d0b" [ 814.133549] env[69475]: _type = "Task" [ 814.133549] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.142175] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f539c2-7eb5-449d-9e4a-ef78eb4e4d0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.271153] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.085s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.271153] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 814.273911] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.778s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.274783] env[69475]: INFO nova.compute.claims [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.289225] env[69475]: DEBUG oslo_concurrency.lockutils [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] Releasing lock "refresh_cache-e960f967-d693-4ea8-9390-8b0232941c58" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.289584] env[69475]: DEBUG nova.compute.manager [req-a12468d7-c9ac-4b22-ab2f-e798d1ab0581 req-9f458044-c215-4fa2-9670-ad41020440b2 service nova] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Received event network-vif-deleted-eeaf90cd-47e0-4b13-b5a2-efabd98551b7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.316921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c1333733-774b-403c-a2b4-3c247e09ad9b tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.896s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.459875] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508102, 'name': RemoveSnapshot_Task} progress is 74%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.488328] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Acquiring lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.488438] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Acquired lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.488553] env[69475]: DEBUG nova.network.neutron [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 814.607531] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.608031] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance network_info: |[{"id": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "address": "fa:16:3e:cf:c8:34", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c1a160-24", "ovs_interfaceid": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 814.608348] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:c8:34', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'ad36dd36-1d2c-4f37-a259-98ef2e440794', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60c1a160-2445-460f-a1ab-ee86bd91a07c', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.616658] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.616914] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.617171] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a44c97cf-1223-4d47-a67c-5e2a6d7294c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.640452] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.640452] env[69475]: value = "task-3508103" [ 814.640452] env[69475]: _type = "Task" [ 814.640452] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.644143] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f539c2-7eb5-449d-9e4a-ef78eb4e4d0b, 'name': SearchDatastore_Task, 'duration_secs': 0.027623} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.647705] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb407def-ebd4-497a-b023-e1ef3361932c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.655467] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508103, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.656935] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 814.656935] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521a44c6-cd95-b809-2876-2dc879f6ae27" [ 814.656935] env[69475]: _type = "Task" [ 814.656935] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.665376] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521a44c6-cd95-b809-2876-2dc879f6ae27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.780136] env[69475]: DEBUG nova.compute.utils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.784757] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.784980] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.820577] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 814.927712] env[69475]: DEBUG nova.policy [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba09f56e4fda4fc99602796a0af6cb33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87670cfd2b848af98507a5ebf9fab51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.959534] env[69475]: DEBUG oslo_vmware.api [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508102, 'name': RemoveSnapshot_Task, 'duration_secs': 1.054931} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.959933] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 814.960269] env[69475]: INFO nova.compute.manager [None req-374d68dc-41bf-48e2-918b-9cb95bc95ad9 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 16.93 seconds to snapshot the instance on the hypervisor. 
[ 815.020833] env[69475]: DEBUG nova.compute.manager [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 815.020947] env[69475]: DEBUG nova.compute.manager [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing instance network info cache due to event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 815.021166] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] Acquiring lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.021306] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] Acquired lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.021459] env[69475]: DEBUG nova.network.neutron [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.158574] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508103, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.172500] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521a44c6-cd95-b809-2876-2dc879f6ae27, 'name': SearchDatastore_Task, 'duration_secs': 0.013344} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.172791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.173073] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e960f967-d693-4ea8-9390-8b0232941c58/e960f967-d693-4ea8-9390-8b0232941c58.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.173482] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-926c2cb1-809e-4b53-897c-278abc039b1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.179394] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 815.179394] env[69475]: value = "task-3508104" [ 815.179394] env[69475]: _type = "Task" [ 815.179394] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.192111] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508104, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.231534] env[69475]: DEBUG nova.network.neutron [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Updating instance_info_cache with network_info: [{"id": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "address": "fa:16:3e:3e:78:bf", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ec4e25-3c", "ovs_interfaceid": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.286218] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.338800] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.561206] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Successfully created port: ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.664980] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508103, 'name': CreateVM_Task, 'duration_secs': 0.527667} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.665252] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.669229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.669229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.669229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 815.669229] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43bf43c9-a8e1-4a6c-b206-9d757cd263ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.677270] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 815.677270] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523e961e-489b-aa9e-208d-6a59d399d38b" [ 815.677270] env[69475]: _type = "Task" [ 815.677270] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.690120] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523e961e-489b-aa9e-208d-6a59d399d38b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.693673] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508104, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.736408] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Releasing lock "refresh_cache-2b0cc71c-862e-4eb0-afc4-b2125003b087" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.736746] env[69475]: DEBUG nova.compute.manager [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Inject network info {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 815.737087] env[69475]: DEBUG nova.compute.manager [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] network_info to inject: |[{"id": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "address": "fa:16:3e:3e:78:bf", "network": {"id": "254ff636-2fcb-4d4c-b050-89948230fa0d", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-2128688610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f1784f9c01de49c494bc44e0272c02cf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "31ac3fea-ebf4-4bed-bf70-1eaecdf71280", "external-id": "nsx-vlan-transportzone-489", "segmentation_id": 489, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05ec4e25-3c", "ovs_interfaceid": "05ec4e25-3c6d-4d4b-a353-a749c7ee1242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 815.743835] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Reconfiguring VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 815.744543] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28aa0083-3756-4492-8729-14a3fbc9fe98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.768034] env[69475]: DEBUG oslo_vmware.api [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Waiting for the task: (returnval){ [ 815.768034] env[69475]: value = "task-3508105" [ 815.768034] env[69475]: _type = "Task" [ 815.768034] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.781016] env[69475]: DEBUG oslo_vmware.api [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Task: {'id': task-3508105, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.798343] env[69475]: DEBUG nova.network.neutron [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updated VIF entry in instance network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 815.798814] env[69475]: DEBUG nova.network.neutron [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [{"id": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "address": "fa:16:3e:cf:c8:34", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c1a160-24", "ovs_interfaceid": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 815.949684] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a316a6f-0d64-4768-95f2-d69d7fe79f37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.958163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a1c994-26d3-4a02-a71e-c8e2877b6e97 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.992064] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b653eac-c73f-4149-8df1-15b3e2ea8d59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.999901] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff04bdf7-fe2c-4e07-8c5b-47a56177d76a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.018132] env[69475]: DEBUG nova.compute.provider_tree [None 
req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 816.193857] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508104, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.781249} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.197755] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e960f967-d693-4ea8-9390-8b0232941c58/e960f967-d693-4ea8-9390-8b0232941c58.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.199370] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.199370] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523e961e-489b-aa9e-208d-6a59d399d38b, 'name': SearchDatastore_Task, 'duration_secs': 0.019451} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.199370] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0489f29d-1545-455b-a1e6-67bb4b4949f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.201545] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.201945] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.202261] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.202423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.202608] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.202941] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2275edb-5010-4e76-a90c-8641d50f5c02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.209521] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 816.209521] env[69475]: value = "task-3508106" [ 816.209521] env[69475]: _type = "Task" [ 816.209521] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.213919] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.214102] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.215280] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a13d26f-b6b2-4b7b-90f9-00223f1928bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.220187] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508106, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.221432] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 816.221432] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d5d89-3b5c-948a-c6cd-d42bce3c578a" [ 816.221432] env[69475]: _type = "Task" [ 816.221432] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.229249] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d5d89-3b5c-948a-c6cd-d42bce3c578a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.276776] env[69475]: DEBUG oslo_vmware.api [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] Task: {'id': task-3508105, 'name': ReconfigVM_Task, 'duration_secs': 0.212419} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.277096] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b3971b83-ade8-4b15-9a4f-630b46b162f6 tempest-ServersAdminTestJSON-1695620084 tempest-ServersAdminTestJSON-1695620084-project-admin] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Reconfigured VM instance to set the machine id {{(pid=69475) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 816.303148] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.305144] env[69475]: DEBUG oslo_concurrency.lockutils [req-eb8f762c-d6bb-474d-bed0-93fad65631ea req-34988145-aa3a-4de6-81ea-b2677f076343 service nova] Releasing lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.322933] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 816.323249] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 816.323450] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 816.323641] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 816.323788] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 816.323937] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 816.324231] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 816.324409] env[69475]: DEBUG 
nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 816.324585] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 816.324743] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 816.324920] env[69475]: DEBUG nova.virt.hardware [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 816.326176] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-570a845f-3aa1-46a4-96e6-fd69f9d00965 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.334433] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a9c4c5-cb72-4543-873e-a350263acf24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.521230] env[69475]: DEBUG nova.scheduler.client.report [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.718991] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508106, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073901} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.719317] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 816.720159] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17596561-c4e5-47d1-84e6-18854fcb92a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.746847] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] e960f967-d693-4ea8-9390-8b0232941c58/e960f967-d693-4ea8-9390-8b0232941c58.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 816.750648] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21064da6-9ef5-4549-8c04-26a5a19887ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.765302] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528d5d89-3b5c-948a-c6cd-d42bce3c578a, 'name': SearchDatastore_Task, 'duration_secs': 0.016823} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.766515] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17fb90cb-fbf5-43df-9b22-5531d202e1fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.771182] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 816.771182] env[69475]: value = "task-3508107" [ 816.771182] env[69475]: _type = "Task" [ 816.771182] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.772548] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 816.772548] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee7f6a-58ab-cbe9-0adb-e301dfd1769f" [ 816.772548] env[69475]: _type = "Task" [ 816.772548] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.784063] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508107, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.787072] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee7f6a-58ab-cbe9-0adb-e301dfd1769f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.026683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.753s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.027178] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 817.030871] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.034s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.031197] env[69475]: DEBUG nova.objects.instance [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lazy-loading 'resources' on Instance uuid df73dd41-7455-4482-abb2-b61b26fcf403 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 817.288498] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ee7f6a-58ab-cbe9-0adb-e301dfd1769f, 'name': SearchDatastore_Task, 'duration_secs': 0.030575} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.289047] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508107, 'name': ReconfigVM_Task, 'duration_secs': 0.345836} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.289047] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.289366] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 5e3e57c5-8367-493f-8268-a0e496c8c878/5e3e57c5-8367-493f-8268-a0e496c8c878.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.289511] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfigured VM instance instance-00000037 to attach disk [datastore1] e960f967-d693-4ea8-9390-8b0232941c58/e960f967-d693-4ea8-9390-8b0232941c58.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 817.290165] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5c854cf-2ea7-4596-bba2-cd33fe37799d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.292018] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8086bfd2-ae13-4837-96cc-d28c08879416 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.298114] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 817.298114] env[69475]: value = "task-3508109" [ 817.298114] env[69475]: _type = "Task" [ 817.298114] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.299725] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 817.299725] env[69475]: value = "task-3508108" [ 817.299725] env[69475]: _type = "Task" [ 817.299725] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.311940] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508109, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.317795] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508108, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.319701] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Successfully updated port: ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 817.322886] env[69475]: DEBUG nova.compute.manager [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received event network-vif-plugged-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 817.323086] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] Acquiring lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.323363] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 817.323453] env[69475]: DEBUG oslo_concurrency.lockutils [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 817.323620] env[69475]: DEBUG nova.compute.manager [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] No waiting events found dispatching network-vif-plugged-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 817.323778] env[69475]: WARNING nova.compute.manager [req-47f9b23b-5db2-4dce-89c0-7993550db6af req-101e1f40-0ec0-4b54-91b5-da8fde1b8bc1 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received unexpected event network-vif-plugged-ed004f95-f0d0-434e-a13d-54bff688d74e for instance with vm_state building and task_state spawning. 
[ 817.537948] env[69475]: DEBUG nova.compute.utils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 817.539776] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.540127] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.611732] env[69475]: DEBUG nova.policy [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7c7d445fe8644bc89ce066c05228758a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5d454d98dea429da9c2cc9300ed9573', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.823508] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508109, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.823900] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508108, 'name': Rename_Task, 'duration_secs': 0.513938} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.823900] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.824102] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8dd219f2-2a5d-4c2e-9daf-497530d53291 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.826254] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 817.826383] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 817.826540] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 817.833601] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 817.833601] env[69475]: value = "task-3508110" [ 817.833601] env[69475]: _type = "Task" [ 817.833601] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.851496] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508110, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.920981] env[69475]: INFO nova.compute.manager [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Rebuilding instance [ 817.980409] env[69475]: DEBUG nova.compute.manager [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 817.985750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995d9458-8a2a-429f-a085-c0045d89c1b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.046811] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 818.108243] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c602994-d1b3-4d76-a124-44d2ea5aca94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.117986] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e297b89-3d27-4f26-8c70-00e697e1932a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.156460] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01250ee6-91cb-4b58-b80b-dc48ac145c2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.164523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09bcce21-16bc-4ede-a653-5d1a43a3945b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.180706] env[69475]: DEBUG nova.compute.provider_tree [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.212067] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Successfully created port: 3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 818.310904] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508109, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.690401} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.311249] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 5e3e57c5-8367-493f-8268-a0e496c8c878/5e3e57c5-8367-493f-8268-a0e496c8c878.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 818.311479] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 818.311745] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e70e64e3-4ada-41ff-a411-f132b816a58c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.318364] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 818.318364] env[69475]: value = "task-3508111" [ 818.318364] env[69475]: _type = "Task" [ 818.318364] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.327345] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508111, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.344740] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508110, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.376320] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 818.547060] env[69475]: DEBUG nova.network.neutron [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [{"id": "ed004f95-f0d0-434e-a13d-54bff688d74e", "address": "fa:16:3e:3d:0c:05", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped004f95-f0", "ovs_interfaceid": "ed004f95-f0d0-434e-a13d-54bff688d74e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.550244] env[69475]: INFO nova.virt.block_device [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Booting with volume 44671911-bc3c-459e-8572-d2ff086a0071 at /dev/sda [ 818.595226] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82a238a4-a627-496f-ba90-d46c5f9739b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.604258] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0332c143-f66f-4d8f-9745-cefa18a9d6fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.640526] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64398560-eb08-4ded-a190-48696c362efd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.648689] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4061d805-5f13-4fa8-a3b4-b3369538f338 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.688709] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705f8718-84fe-4dbf-834f-0c93fcc3d577 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.695482] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8f3a2f-74a7-4fce-819a-fa06216a9545 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.708962] env[69475]: DEBUG nova.virt.block_device [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updating existing volume attachment record: 9c00d9e1-bf86-46ca-9441-d46e717e430a {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 818.717171] env[69475]: DEBUG nova.scheduler.client.report [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 82 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 818.717467] env[69475]: DEBUG nova.compute.provider_tree [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 82 to 83 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 818.717651] env[69475]: DEBUG nova.compute.provider_tree [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 818.827654] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508111, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068035} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.828616] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.829127] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1430f7a-cc6e-4a24-b921-21d27bfcffd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.851073] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Reconfiguring VM instance instance-00000038 to attach disk [datastore2] 5e3e57c5-8367-493f-8268-a0e496c8c878/5e3e57c5-8367-493f-8268-a0e496c8c878.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.854147] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2df5bcab-27e3-400f-bc13-2b867b95fd82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.872781] env[69475]: DEBUG oslo_vmware.api [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508110, 'name': PowerOnVM_Task, 'duration_secs': 0.761852} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.873972] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 818.874211] env[69475]: INFO nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Took 9.44 seconds to spawn the instance on the hypervisor. [ 818.874389] env[69475]: DEBUG nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 818.874707] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 818.874707] env[69475]: value = "task-3508112" [ 818.874707] env[69475]: _type = "Task" [ 818.874707] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.875390] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9af20e-133a-4f42-90ab-e6bb3d305856 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.889962] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508112, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.006635] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 819.006911] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-874c5449-1fba-423c-89aa-aeb90f643765 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.014662] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 819.014662] env[69475]: value = "task-3508113" [ 819.014662] env[69475]: _type = "Task" [ 819.014662] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.022668] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508113, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.053742] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.053902] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Instance network_info: |[{"id": "ed004f95-f0d0-434e-a13d-54bff688d74e", "address": "fa:16:3e:3d:0c:05", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped004f95-f0", "ovs_interfaceid": "ed004f95-f0d0-434e-a13d-54bff688d74e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 819.054326] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:0c:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed004f95-f0d0-434e-a13d-54bff688d74e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 819.062492] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating folder: Project (e87670cfd2b848af98507a5ebf9fab51). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 819.062796] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5b18d0de-66e1-4c64-aca3-edd84c9dfdf0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.074057] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created folder: Project (e87670cfd2b848af98507a5ebf9fab51) in parent group-v700823. [ 819.074261] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating folder: Instances. Parent ref: group-v700988. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 819.074501] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60469b4c-5c88-4b7d-9002-eebabba0b488 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.083993] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created folder: Instances in parent group-v700988. [ 819.084283] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 819.084468] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 819.084669] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bb5f593-c822-42d7-88bb-eb813a4bf837 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.104443] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 819.104443] env[69475]: value = "task-3508116" [ 819.104443] env[69475]: _type = "Task" [ 819.104443] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.112468] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508116, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.224441] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.194s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.230642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.070s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.230642] env[69475]: DEBUG nova.objects.instance [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lazy-loading 'resources' on Instance uuid a75d7a92-4ac7-4fa0-90f0-f0a0993e881e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 819.259103] env[69475]: INFO nova.scheduler.client.report [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleted allocations for instance df73dd41-7455-4482-abb2-b61b26fcf403 [ 819.389534] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508112, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.401018] env[69475]: INFO nova.compute.manager [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Took 43.68 seconds to build instance. [ 819.525244] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508113, 'name': PowerOffVM_Task, 'duration_secs': 0.2701} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.526164] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 819.526733] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.527754] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1925fc-9aa3-485a-94f5-782dde22da82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.536512] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 819.536817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f3eef348-f8b3-4d6d-8113-19f3ef4dab1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.615911] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508116, 'name': CreateVM_Task, 'duration_secs': 0.348282} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.617581] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 819.619408] env[69475]: DEBUG nova.compute.manager [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 819.619907] env[69475]: DEBUG nova.compute.manager [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing instance network info cache due to event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 819.620424] env[69475]: DEBUG oslo_concurrency.lockutils [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] Acquiring lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.620727] env[69475]: DEBUG oslo_concurrency.lockutils [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] Acquired lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.621029] env[69475]: DEBUG nova.network.neutron [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 819.627927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.627927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.627927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 819.628425] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.628748] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.628842] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.629386] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-8a912027-5287-4d83-a23a-40f987afddf0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.631198] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8db43d32-5a4e-452b-904d-b4f5829b0279 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.636768] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 819.636768] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52617371-9a03-3870-4af0-e20d8f308096" [ 819.636768] env[69475]: _type = "Task" [ 819.636768] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.641604] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 819.641604] env[69475]: value = "task-3508118" [ 819.641604] env[69475]: _type = "Task" [ 819.641604] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.647782] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52617371-9a03-3870-4af0-e20d8f308096, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.652555] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508118, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.736084] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.736409] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.736840] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.736840] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.736840] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.742702] env[69475]: INFO nova.compute.manager [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Terminating instance [ 819.778896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71a4bd8c-17b5-4e19-b1ba-08acd8118316 tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "df73dd41-7455-4482-abb2-b61b26fcf403" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.306s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.890672] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508112, 'name': ReconfigVM_Task, 'duration_secs': 0.539568} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.890908] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Reconfigured VM instance instance-00000038 to attach disk [datastore2] 5e3e57c5-8367-493f-8268-a0e496c8c878/5e3e57c5-8367-493f-8268-a0e496c8c878.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.891900] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2132e5bb-c3af-4e8c-9d42-a0a9d2f3adce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.900055] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 819.900055] env[69475]: value = "task-3508119" [ 819.900055] env[69475]: _type = "Task" [ 819.900055] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.906715] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c807544a-7232-44ca-b7cd-7e18714c943b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.456s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.915925] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508119, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.150652] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52617371-9a03-3870-4af0-e20d8f308096, 'name': SearchDatastore_Task, 'duration_secs': 0.029295} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.154627] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.155071] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 820.155432] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.155714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.156129] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 820.160672] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-eb952262-8ef1-43ba-82d1-46667705e7f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.163572] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.455317} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.166635] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 820.166942] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 820.167248] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 820.180132] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 820.180301] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 820.181089] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efc654f3-d2e7-4db2-82ff-b2fcae4e09b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.190046] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 820.190046] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bd2349-fb73-b03b-6346-a6e9351048ae" [ 820.190046] env[69475]: _type = "Task" [ 820.190046] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.198075] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bd2349-fb73-b03b-6346-a6e9351048ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.201325] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Successfully updated port: 3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 820.247198] env[69475]: DEBUG nova.compute.manager [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 820.247298] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 820.248269] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d321ed26-6e8c-490d-a94b-0090f13f4e19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.258560] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 820.258816] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ebbbe9b8-48c0-4fa9-abb2-514ed26da636 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.269061] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 820.269061] env[69475]: value = "task-3508120" [ 820.269061] env[69475]: _type = "Task" [ 820.269061] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.278946] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508120, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.327022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe37917-805f-4cf6-a7b4-272c0eff625c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.336477] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e0d2744-0016-463a-baea-09c9092fc298 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.374945] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07d0e140-0868-4887-a2b5-043e32734759 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.386942] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f42a139-5abc-4011-b96d-513a640538c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.402313] env[69475]: DEBUG nova.compute.provider_tree [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 820.412081] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508119, 'name': Rename_Task, 'duration_secs': 0.230211} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.412550] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.413432] env[69475]: DEBUG nova.network.neutron [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updated VIF entry in instance network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 820.413792] env[69475]: DEBUG nova.network.neutron [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [{"id": "ed004f95-f0d0-434e-a13d-54bff688d74e", "address": "fa:16:3e:3d:0c:05", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped004f95-f0", "ovs_interfaceid": "ed004f95-f0d0-434e-a13d-54bff688d74e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.414825] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15501e27-a529-4894-be65-c200595807f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.416923] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 820.425301] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 820.425301] env[69475]: value = "task-3508121" [ 820.425301] env[69475]: _type = "Task" [ 820.425301] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.435541] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508121, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.701168] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bd2349-fb73-b03b-6346-a6e9351048ae, 'name': SearchDatastore_Task, 'duration_secs': 0.020488} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.702070] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5812399f-a7db-41c1-8145-8f73192fdc9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.707169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.707169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquired lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 820.707169] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.713319] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 820.713319] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ccb293-f8f9-3ea8-db9d-32d05e11d21e" [ 820.713319] env[69475]: _type = "Task" [ 820.713319] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.725744] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ccb293-f8f9-3ea8-db9d-32d05e11d21e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.780843] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508120, 'name': PowerOffVM_Task, 'duration_secs': 0.254551} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.781133] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 820.781304] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 820.781675] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21b97648-f912-4736-9d08-e7e6b746f5fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.834301] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 820.835033] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.835463] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 820.835759] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 820.836103] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 820.836422] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Image pref 0:0:0 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 820.836697] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 820.837095] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 820.837440] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 820.837749] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 820.838068] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 820.838406] env[69475]: DEBUG nova.virt.hardware [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 820.841030] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df2cfaea-4515-41bf-a235-b931801a325d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.846607] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 820.846975] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 820.847298] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleting the datastore file [datastore2] 
b71882d4-537d-4a90-b43d-f8ac4ca0d90c {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 820.849947] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f829b9a9-51e3-4bfb-808c-05239953488c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.853398] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5626b86-436e-4596-8f38-f47b21452cdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.871194] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 820.871194] env[69475]: value = "task-3508123" [ 820.871194] env[69475]: _type = "Task" [ 820.871194] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.879740] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.917521] env[69475]: DEBUG oslo_concurrency.lockutils [req-df2a5495-b165-43ac-82ff-8b19fc47fa89 req-a275f637-f629-4194-8ce9-03d786afaa70 service nova] Releasing lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 820.936238] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508121, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.941425] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 820.942350] env[69475]: DEBUG nova.scheduler.client.report [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 83 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 820.942570] env[69475]: DEBUG nova.compute.provider_tree [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 83 to 84 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 820.942751] env[69475]: DEBUG nova.compute.provider_tree [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 821.207513] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 821.207811] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 821.208051] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 821.208289] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 821.208483] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 821.208697] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 821.209013] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 821.209236] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 821.211426] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 821.211426] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 821.211426] env[69475]: DEBUG nova.virt.hardware [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 821.213337] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89697d3b-7a8c-4204-821d-098850c56c83 {{(pid=69475) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.224740] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.224740] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.232915] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ccb293-f8f9-3ea8-db9d-32d05e11d21e, 'name': SearchDatastore_Task, 'duration_secs': 0.013589} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.236397] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1fddea-f951-4b49-803e-15437287fcfe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.239465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.239579] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8d50b322-fa03-4e48-b74b-a63578e4701c/8d50b322-fa03-4e48-b74b-a63578e4701c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 821.240346] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-164a2c94-8d38-4023-927f-60f80656cb4a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.254787] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:13:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e51856c-7355-448c-82fc-e5af23bb0fcf', 'vif_model': 
'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.263621] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.265748] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 821.268041] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.268501] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 821.268501] env[69475]: value = "task-3508124" [ 821.268501] env[69475]: _type = "Task" [ 821.268501] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.268786] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f005d9d7-a460-4ba8-bf29-252dd8cbeb81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.293476] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.294855] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.294855] env[69475]: value = "task-3508125" [ 821.294855] env[69475]: _type = "Task" [ 821.294855] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.302418] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508125, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.383840] env[69475]: DEBUG oslo_vmware.api [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274442} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.384140] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 821.384389] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 821.384593] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 821.384766] env[69475]: INFO nova.compute.manager [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 821.385076] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.385291] env[69475]: DEBUG nova.compute.manager [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 821.385404] env[69475]: DEBUG nova.network.neutron [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 821.435951] env[69475]: DEBUG oslo_vmware.api [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508121, 'name': PowerOnVM_Task, 'duration_secs': 0.782581} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.436285] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.436492] env[69475]: INFO nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 9.21 seconds to spawn the instance on the hypervisor. 
[ 821.436671] env[69475]: DEBUG nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.439642] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a06200e-080c-4b73-9d55-bf03b0a140ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.453026] env[69475]: DEBUG nova.network.neutron [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updating instance_info_cache with network_info: [{"id": "3041b80e-1b4f-454f-92b6-d002b52423b5", "address": "fa:16:3e:8f:51:41", "network": {"id": "fdf12f35-f15a-4e19-8404-a57b06812497", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-417366095-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5d454d98dea429da9c2cc9300ed9573", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3041b80e-1b", "ovs_interfaceid": "3041b80e-1b4f-454f-92b6-d002b52423b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.453026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.454792] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.596s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.456865] env[69475]: INFO nova.compute.claims [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.478882] env[69475]: INFO nova.scheduler.client.report [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted allocations for instance a75d7a92-4ac7-4fa0-90f0-f0a0993e881e [ 821.729939] env[69475]: DEBUG nova.compute.utils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 821.795607] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508124, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.805033] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508125, 'name': CreateVM_Task, 'duration_secs': 0.416333} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.805210] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 821.805879] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.806078] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.806407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 821.806665] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2632a71e-0672-4b7a-9935-6d99fc6b3a13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.811596] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 821.811596] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ed931-62ea-717e-8c0a-b8b575da2dd0" [ 821.811596] env[69475]: _type = "Task" [ 821.811596] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.819660] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ed931-62ea-717e-8c0a-b8b575da2dd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.956257] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Releasing lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.956257] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance network_info: |[{"id": "3041b80e-1b4f-454f-92b6-d002b52423b5", "address": "fa:16:3e:8f:51:41", "network": {"id": "fdf12f35-f15a-4e19-8404-a57b06812497", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-417366095-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5d454d98dea429da9c2cc9300ed9573", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3041b80e-1b", "ovs_interfaceid": "3041b80e-1b4f-454f-92b6-d002b52423b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.960432] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:51:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '411f389f-4e4f-4450-891e-38944cac6135', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3041b80e-1b4f-454f-92b6-d002b52423b5', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.967935] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Creating folder: Project (e5d454d98dea429da9c2cc9300ed9573). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.971585] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-04d8b1f4-0cc1-45ca-aec5-6398cd03fef7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.973756] env[69475]: INFO nova.compute.manager [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 42.83 seconds to build instance. [ 821.976672] env[69475]: DEBUG nova.compute.manager [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Received event network-vif-plugged-3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.977394] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Acquiring lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.977394] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.977394] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.977394] env[69475]: DEBUG nova.compute.manager [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] No waiting events found dispatching network-vif-plugged-3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.977589] env[69475]: WARNING nova.compute.manager [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Received unexpected event network-vif-plugged-3041b80e-1b4f-454f-92b6-d002b52423b5 for instance with vm_state building and task_state spawning. 
[ 821.977689] env[69475]: DEBUG nova.compute.manager [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Received event network-changed-3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.978464] env[69475]: DEBUG nova.compute.manager [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Refreshing instance network info cache due to event network-changed-3041b80e-1b4f-454f-92b6-d002b52423b5. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 821.978464] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Acquiring lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.978464] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Acquired lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.978464] env[69475]: DEBUG nova.network.neutron [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Refreshing network info cache for port 3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 821.986656] env[69475]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 821.986822] env[69475]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=69475) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 821.989412] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Folder already exists: Project (e5d454d98dea429da9c2cc9300ed9573). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 821.989611] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Creating folder: Instances. Parent ref: group-v700947. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.990055] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0dac9042-003b-4b3e-a295-6ddd64425bbb tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "a75d7a92-4ac7-4fa0-90f0-f0a0993e881e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.236s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.990998] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f23ca5f8-7b4f-4cd0-a699-78282dfd99b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.002034] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Created folder: Instances in parent group-v700947. [ 822.002168] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 822.002790] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 822.002999] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7fd2e9e5-c4b2-4fda-adb3-5b3fdbbe9198 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.022299] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 822.022299] env[69475]: value = "task-3508128" [ 822.022299] env[69475]: _type = "Task" [ 822.022299] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.030624] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508128, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.235065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.258599] env[69475]: DEBUG nova.network.neutron [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.293973] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579544} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.294365] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8d50b322-fa03-4e48-b74b-a63578e4701c/8d50b322-fa03-4e48-b74b-a63578e4701c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 822.294623] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 822.295336] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb1d1c53-ba8c-4f14-b1ab-e1c922b0e061 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.302254] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 822.302254] env[69475]: value = "task-3508129" [ 822.302254] env[69475]: _type = "Task" [ 822.302254] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.310785] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508129, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.321490] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ed931-62ea-717e-8c0a-b8b575da2dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.017227} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.322689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.323141] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.323443] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.323566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.323758] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.324096] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fbeac69-b964-4fe9-955e-32fe85ab7c84 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.335710] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.335927] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.336711] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08cbb150-f494-4a65-b395-7cad0abb3997 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.344589] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 822.344589] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525eb295-5886-19f0-2e17-eeff497a95cf" [ 822.344589] env[69475]: _type = "Task" [ 822.344589] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.350411] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 822.351360] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a15421a-dc36-45ba-b615-c56a82884196 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.359580] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525eb295-5886-19f0-2e17-eeff497a95cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.363147] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 822.363324] env[69475]: ERROR oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk due to incomplete transfer. [ 822.363545] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-eacb0d79-8ee1-43cd-a575-a263bfcf9a71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.370282] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f6ab87-262b-e083-4bf7-c05c9f2e9ce7/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 822.370595] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Uploaded image e248cf7a-f2b1-4f73-b442-4f4396e08e5b to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 822.372348] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 822.372601] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1fc29575-0395-4976-9977-26bd13d4f974 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.378411] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 822.378411] env[69475]: value = "task-3508130" [ 822.378411] env[69475]: _type = "Task" [ 822.378411] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.386647] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508130, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.481117] env[69475]: DEBUG oslo_concurrency.lockutils [None req-12dd33cb-5b4a-46a0-9ccb-a43677be43d1 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.746s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.534708] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508128, 'name': CreateVM_Task, 'duration_secs': 0.386225} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.534891] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.535610] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700950', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'name': 'volume-44671911-bc3c-459e-8572-d2ff086a0071', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8cc0636c-84af-4f68-bec8-1493b421a605', 'attached_at': '', 'detached_at': '', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'serial': '44671911-bc3c-459e-8572-d2ff086a0071'}, 'device_type': None, 'attachment_id': '9c00d9e1-bf86-46ca-9441-d46e717e430a', 'mount_device': '/dev/sda', 'delete_on_termination': True, 'boot_index': 0, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69475) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 822.535768] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Root volume attach. Driver type: vmdk {{(pid=69475) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 822.537098] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c89295-1385-4ae2-bcd7-6b9cb13d187c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.546130] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3d0176-f1ba-4d8f-8511-a08114bc0da2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.552291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b0ac79-4ba9-49f3-9231-fdc3de112abb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.560087] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b64e2369-c587-4c48-9d0d-4499b775475a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.567313] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 822.567313] env[69475]: value = "task-3508131" [ 822.567313] env[69475]: _type = "Task" [ 822.567313] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.575882] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.761520] env[69475]: INFO nova.compute.manager [-] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Took 1.38 seconds to deallocate network for instance. [ 822.783788] env[69475]: DEBUG nova.network.neutron [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updated VIF entry in instance network info cache for port 3041b80e-1b4f-454f-92b6-d002b52423b5. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 822.784266] env[69475]: DEBUG nova.network.neutron [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updating instance_info_cache with network_info: [{"id": "3041b80e-1b4f-454f-92b6-d002b52423b5", "address": "fa:16:3e:8f:51:41", "network": {"id": "fdf12f35-f15a-4e19-8404-a57b06812497", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-417366095-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5d454d98dea429da9c2cc9300ed9573", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3041b80e-1b", "ovs_interfaceid": "3041b80e-1b4f-454f-92b6-d002b52423b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.822131] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.162457} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.822431] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 822.823308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e361e31d-4ca0-420c-b317-7dc701924716 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.866190] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Reconfiguring VM instance instance-00000039 to attach disk [datastore2] 8d50b322-fa03-4e48-b74b-a63578e4701c/8d50b322-fa03-4e48-b74b-a63578e4701c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 822.875315] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec51cc04-e131-427a-8e3a-4db0fccde834 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.900422] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525eb295-5886-19f0-2e17-eeff497a95cf, 'name': SearchDatastore_Task, 'duration_secs': 0.014856} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.903063] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 822.903063] env[69475]: value = "task-3508132" [ 822.903063] env[69475]: _type = "Task" [ 822.903063] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.905917] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-138eb830-dd01-47cf-b8a9-4871adf4787f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.915793] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508130, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.919916] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 822.919916] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a4130d-45b2-45ce-e568-8b8c14b85ec6" [ 822.919916] env[69475]: _type = "Task" [ 822.919916] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.925346] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.944993] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a4130d-45b2-45ce-e568-8b8c14b85ec6, 'name': SearchDatastore_Task, 'duration_secs': 0.012685} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.945470] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.945988] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 822.946401] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6f69b32-0d3f-4134-b129-30dc8767a45f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.956292] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 822.956292] env[69475]: value = "task-3508133" [ 822.956292] env[69475]: _type = "Task" [ 822.956292] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.971636] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.064121] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7611561-14e9-46c7-8b20-b3e368cf35d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.082295] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 43%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.084969] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d217f9d5-cdd2-4bb4-bedd-edddce14fb89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.118691] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa57f28-99ea-4e61-862b-a8561d3de3ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.128765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8051fe-0cd8-4f9e-a69d-4068b8b547ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.146110] env[69475]: DEBUG nova.compute.provider_tree [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.275527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.287920] env[69475]: DEBUG oslo_concurrency.lockutils [req-09f18086-abfa-46be-bf25-7d4e209bca64 req-542c5f32-cf62-45ff-bb3b-43ca2e3f8d13 service nova] Releasing lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.343687] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.343687] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.343687] env[69475]: INFO nova.compute.manager [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Attaching volume df29d466-8554-4613-aa48-1be45554c1b5 to /dev/sdb [ 823.408887] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508130, 'name': Destroy_Task, 'duration_secs': 0.844576} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.414870] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Destroyed the VM [ 823.415159] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 823.416761] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4ef02d0d-2549-4a18-ba98-5ae1e0385850 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.423483] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c043dd-9166-4bef-8d99-09f09aab9862 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.433366] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.439413] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 823.439413] env[69475]: value = "task-3508134" [ 823.439413] env[69475]: _type = "Task" [ 823.439413] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.448469] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd88bd8-4ac9-42f3-8f91-372f1feee6ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.462172] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508134, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.469432] env[69475]: DEBUG nova.virt.block_device [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updating existing volume attachment record: fee7c51f-d142-416a-ab3f-c8801a5da731 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 823.477903] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508133, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.582233] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 58%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.649914] env[69475]: DEBUG nova.scheduler.client.report [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.924444] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.956130] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508134, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.974500] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508133, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563817} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.974869] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 823.975085] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 823.975297] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4b6fb31f-9dd6-42f4-a65a-8d9e3b515225 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.985183] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 823.985183] env[69475]: value = "task-3508138" [ 823.985183] env[69475]: _type = "Task" [ 823.985183] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.999892] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508138, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.083429] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 71%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.157307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.702s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.158015] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 824.162014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.124s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.163628] env[69475]: INFO nova.compute.claims [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.421912] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.460162] env[69475]: DEBUG oslo_vmware.api [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508134, 'name': RemoveSnapshot_Task, 'duration_secs': 0.803303} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.460574] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 824.460810] env[69475]: INFO nova.compute.manager [None req-e87e36c1-a9da-46d7-8378-b7791b34e91c tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Took 18.22 seconds to snapshot the instance on the hypervisor. [ 824.497235] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508138, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073839} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.497235] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 824.499232] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b08ecb4-8d27-46df-8d67-3aa62c31fa26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.528858] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 824.529074] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bfeb7dc2-ec9f-41b5-b66a-cf59f7288d22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.548279] env[69475]: DEBUG nova.compute.manager [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Received event network-vif-deleted-f46198f7-e2cd-4d21-8b63-33c585b37c57 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.548404] env[69475]: DEBUG nova.compute.manager [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 824.548565] env[69475]: DEBUG nova.compute.manager [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing instance network info cache due to event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 824.548779] env[69475]: DEBUG oslo_concurrency.lockutils [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.549119] env[69475]: DEBUG oslo_concurrency.lockutils [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.549334] env[69475]: DEBUG nova.network.neutron [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 824.560332] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 824.560332] env[69475]: value = "task-3508139" [ 824.560332] env[69475]: _type = "Task" [ 824.560332] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.572271] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508139, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.583466] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 84%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.675236] env[69475]: DEBUG nova.compute.utils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 824.676979] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 824.677218] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 824.759090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.759643] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.764457] env[69475]: DEBUG nova.policy [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10edfc7213ac43b6a87eee0594e5bc22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7fb53bce6145da8fe1e2f8beb57807', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 824.922359] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.071190] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508139, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.082127] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 97%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.182099] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 825.262154] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 825.314013] env[69475]: DEBUG nova.network.neutron [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updated VIF entry in instance network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 825.314399] env[69475]: DEBUG nova.network.neutron [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.188", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.423675] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508132, 'name': ReconfigVM_Task, 'duration_secs': 2.132553} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.423974] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Reconfigured VM instance instance-00000039 to attach disk [datastore2] 8d50b322-fa03-4e48-b74b-a63578e4701c/8d50b322-fa03-4e48-b74b-a63578e4701c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.424635] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fde40237-0b9d-4bf5-86c8-fafea4433ded {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.433551] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 825.433551] env[69475]: value = "task-3508140" [ 825.433551] env[69475]: _type = "Task" [ 825.433551] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.445531] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508140, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.578916] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508139, 'name': ReconfigVM_Task, 'duration_secs': 0.8442} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.581810] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.588750] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-44f81f12-1435-4836-bac2-51b4235fb11e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.597026] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task} progress is 98%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.597026] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 825.597026] env[69475]: value = "task-3508141" [ 825.597026] env[69475]: _type = "Task" [ 825.597026] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.606099] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508141, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.702419] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Successfully created port: 7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 825.779364] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f43b0c6-5d52-46c5-a7bc-fbe843f7e1cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.783389] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.788315] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a18fa9d-2bb2-471b-93b0-78ffa19a3b16 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.819190] env[69475]: DEBUG oslo_concurrency.lockutils [req-cc7343f6-49fe-47b1-9288-6609a2186b4b req-5f3bdc32-6958-43a1-ab7f-c9d94b794bc8 service nova] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.820289] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c98572b-daaf-4305-ad96-5b29609ef0f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.827834] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2721673-026d-4e1d-887a-cdedd7a8ca5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.848933] env[69475]: DEBUG nova.compute.provider_tree [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 825.943785] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508140, 'name': Rename_Task, 'duration_secs': 0.285226} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.943785] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 825.943785] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ff10e1a-9f8a-4fa6-8255-7d4e8561f608 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.949512] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 825.949512] env[69475]: value = "task-3508142" [ 825.949512] env[69475]: _type = "Task" [ 825.949512] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.960221] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508142, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.083052] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508131, 'name': RelocateVM_Task, 'duration_secs': 3.192679} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.087391] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 826.087513] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700950', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'name': 'volume-44671911-bc3c-459e-8572-d2ff086a0071', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8cc0636c-84af-4f68-bec8-1493b421a605', 'attached_at': '', 'detached_at': '', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'serial': '44671911-bc3c-459e-8572-d2ff086a0071'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 826.088340] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f4de11-316e-4ef2-861d-e74e703e3ab4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.109429] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b9d2be4-c9f2-4c8a-9e18-ac81c6cc1f22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.117286] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508141, 'name': Rename_Task, 'duration_secs': 0.420044} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.128572] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.139947] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] volume-44671911-bc3c-459e-8572-d2ff086a0071/volume-44671911-bc3c-459e-8572-d2ff086a0071.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.140568] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-90bbedb1-07ec-41c3-83d0-252de4b052e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.142952] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e956b3be-e96b-48e0-9270-ab29bd85b9ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.164825] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 826.164825] env[69475]: value = "task-3508144" [ 826.164825] env[69475]: _type = "Task" [ 826.164825] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.166209] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 826.166209] env[69475]: value = "task-3508145" [ 826.166209] env[69475]: _type = "Task" [ 826.166209] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.177585] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508144, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.180375] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508145, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.195217] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 826.224636] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.224909] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 826.225080] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 826.225269] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 826.225413] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 826.225558] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 826.225769] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 826.225931] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 826.226161] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 
tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 826.226366] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 826.226568] env[69475]: DEBUG nova.virt.hardware [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 826.227462] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d7c2a7-be21-422f-9345-441aab11f96a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.235108] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ec5684-28c8-4538-8c08-527de3996a98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.374144] env[69475]: ERROR nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [req-85b66611-82f6-480a-8220-ce3e0760a692] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-85b66611-82f6-480a-8220-ce3e0760a692"}]} [ 826.393679] env[69475]: DEBUG nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 826.409020] env[69475]: DEBUG nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 826.409268] env[69475]: DEBUG nova.compute.provider_tree [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 826.420685] env[69475]: DEBUG nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 826.440868] env[69475]: DEBUG nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 826.459567] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508142, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.687650] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508145, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.687768] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508144, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.975323] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508142, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.982098] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360e48ab-95dc-4840-bc09-96bbb315145d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.992323] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66810a18-3807-467e-94b0-dce961661d5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.041051] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98040ad7-c064-49d4-8e19-12718e4fabff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.053427] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1427b75a-eb70-4832-9cef-62037a93c12b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.068283] env[69475]: DEBUG nova.compute.provider_tree [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 827.183871] env[69475]: DEBUG oslo_vmware.api [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508144, 'name': PowerOnVM_Task, 'duration_secs': 0.86472} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.187042] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.187278] env[69475]: DEBUG nova.compute.manager [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.187609] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508145, 'name': ReconfigVM_Task, 'duration_secs': 0.831785} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.188330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203bc0a6-f581-4aea-aade-29935d17181a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.190867] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Reconfigured VM instance instance-0000003a to attach disk [datastore1] volume-44671911-bc3c-459e-8572-d2ff086a0071/volume-44671911-bc3c-459e-8572-d2ff086a0071.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 827.198111] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd197e85-ae2a-4200-b305-d0d8370b1133 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.210244] env[69475]: DEBUG nova.compute.manager [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 827.210447] env[69475]: DEBUG nova.compute.manager [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing instance network info cache due to event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 827.211934] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.212153] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 827.212347] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 827.220796] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 827.220796] env[69475]: value = "task-3508146" [ 827.220796] env[69475]: _type = "Task" [ 827.220796] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.234106] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508146, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.463723] env[69475]: DEBUG oslo_vmware.api [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508142, 'name': PowerOnVM_Task, 'duration_secs': 1.404648} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.464082] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.464357] env[69475]: INFO nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Took 11.16 seconds to spawn the instance on the hypervisor. 
[ 827.464589] env[69475]: DEBUG nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.465667] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cc1efa-8833-401d-a72b-05fd2a9db80c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.549275] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.549830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.549830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.549931] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.550110] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.553493] env[69475]: INFO nova.compute.manager [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Terminating instance [ 827.602332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.602803] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.603210] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.603543] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.603850] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.606645] env[69475]: INFO nova.compute.manager [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Terminating instance [ 827.609483] env[69475]: DEBUG nova.scheduler.client.report [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 85 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 827.609914] env[69475]: DEBUG nova.compute.provider_tree [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 85 to 86 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 827.610270] env[69475]: DEBUG nova.compute.provider_tree [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 
tempest-AttachInterfacesTestJSON-1478316462-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 827.735115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.741491] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508146, 'name': ReconfigVM_Task, 'duration_secs': 0.170237} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.741817] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700950', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'name': 'volume-44671911-bc3c-459e-8572-d2ff086a0071', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8cc0636c-84af-4f68-bec8-1493b421a605', 'attached_at': '', 'detached_at': '', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'serial': '44671911-bc3c-459e-8572-d2ff086a0071'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 827.742394] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-02a1982b-d611-4977-a65b-8a3264dfbb5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.752165] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 827.752165] env[69475]: value = "task-3508147" [ 827.752165] env[69475]: _type = "Task" [ 827.752165] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.759589] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508147, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.854547] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Successfully updated port: 7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 827.931852] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updated VIF entry in instance network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 827.933345] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.994389] env[69475]: INFO nova.compute.manager [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Took 45.65 seconds to build instance. [ 828.057558] env[69475]: DEBUG nova.compute.manager [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 828.057795] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.058713] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c1b2336-f5c4-4878-a625-250820bd9299 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.067307] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.067569] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d44bdb3c-0f9b-4b29-a51d-7107ceec2441 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.074816] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 828.074816] env[69475]: value = "task-3508148" [ 828.074816] env[69475]: _type = "Task" [ 828.074816] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.085988] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508148, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.118034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.956s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.118534] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 828.122337] env[69475]: DEBUG nova.compute.manager [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 828.123365] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 828.123365] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.080s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.123365] env[69475]: DEBUG nova.objects.instance [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lazy-loading 'resources' on Instance uuid 3fba85c9-7798-4a66-b335-21f80962e0bd {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 828.125702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b30b37-aedd-440b-8858-2530d5b274f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.137927] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.138196] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00a60044-91b2-4812-9f97-07e1339ed677 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.221660] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.222118] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.222382] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.222730] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a62277e4-315e-4a1b-a3b9-34e3eec51c8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.229664] env[69475]: DEBUG oslo_vmware.api [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 
tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 828.229664] env[69475]: value = "task-3508150" [ 828.229664] env[69475]: _type = "Task" [ 828.229664] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.240930] env[69475]: DEBUG oslo_vmware.api [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508150, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.261851] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508147, 'name': Rename_Task, 'duration_secs': 0.374926} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.262225] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 828.262517] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf95f32a-d470-482f-9a7a-a387bd005463 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.269249] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 828.269249] env[69475]: value = "task-3508151" [ 828.269249] env[69475]: _type = "Task" [ 828.269249] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.280440] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508151, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.358737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.358737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.358737] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.439465] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 828.439826] env[69475]: DEBUG nova.compute.manager [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.440093] env[69475]: DEBUG nova.compute.manager [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing instance network info cache due to event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.440388] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Acquiring lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.440585] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Acquired lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.440822] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.499950] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a4eb1a4e-81b1-4640-b0de-d079e048a0f2 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.233s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.532641] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 828.532898] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700995', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'name': 'volume-df29d466-8554-4613-aa48-1be45554c1b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e960f967-d693-4ea8-9390-8b0232941c58', 'attached_at': '', 'detached_at': '', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'serial': 'df29d466-8554-4613-aa48-1be45554c1b5'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 828.533888] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f929706d-73d0-4ebd-b197-89e7604eb794 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.550322] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05083aad-697a-49e4-95c4-43638678bfaa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.576172] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] volume-df29d466-8554-4613-aa48-1be45554c1b5/volume-df29d466-8554-4613-aa48-1be45554c1b5.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.576804] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72ed3abb-2942-4300-b2f6-c41e90b73594 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.598383] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 828.598383] env[69475]: value = "task-3508152" [ 828.598383] env[69475]: _type = "Task" [ 828.598383] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.601349] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508148, 'name': PowerOffVM_Task, 'duration_secs': 0.224946} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.604474] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 828.604646] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 828.604878] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-93010e11-e06e-448e-8a40-e6e1df58ee92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.612356] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508152, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.627093] env[69475]: DEBUG nova.compute.utils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 828.628871] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 828.629143] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 828.668652] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 828.668949] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 828.669217] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleting the datastore file [datastore2] d1a316d5-59ef-4286-9d7e-a444ffadc49d {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 828.669545] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-72575df0-e8d5-4ee2-a4a1-60d924a6906e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.678113] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for the task: (returnval){ [ 828.678113] env[69475]: value = "task-3508154" [ 828.678113] env[69475]: _type = "Task" [ 828.678113] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.686876] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508154, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.696959] env[69475]: DEBUG nova.policy [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 828.747829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.748148] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.749505] env[69475]: DEBUG oslo_vmware.api [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508150, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24009} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.753061] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 828.753216] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 828.754262] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 828.754262] env[69475]: INFO nova.compute.manager [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 828.754262] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 828.754770] env[69475]: DEBUG nova.compute.manager [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 828.754770] env[69475]: DEBUG nova.network.neutron [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 828.780352] env[69475]: DEBUG oslo_vmware.api [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508151, 'name': PowerOnVM_Task, 'duration_secs': 0.485071} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.783216] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 828.783445] env[69475]: INFO nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Took 7.95 seconds to spawn the instance on the hypervisor. [ 828.783636] env[69475]: DEBUG nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 828.784833] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1d6737-27ad-4a81-a477-1d49e4e33f12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.939378] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 829.114326] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508152, 'name': ReconfigVM_Task, 'duration_secs': 0.449336} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.114985] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Successfully created port: 30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 829.119463] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfigured VM instance instance-00000037 to attach disk [datastore1] volume-df29d466-8554-4613-aa48-1be45554c1b5/volume-df29d466-8554-4613-aa48-1be45554c1b5.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.127404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a811910a-7628-4248-b406-9d5be050e73c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.137952] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 829.146874] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 829.146874] env[69475]: value = "task-3508155" [ 829.146874] env[69475]: _type = "Task" [ 829.146874] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.159741] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508155, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.188325] env[69475]: DEBUG oslo_vmware.api [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Task: {'id': task-3508154, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220686} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.188751] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 829.188751] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 829.188929] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 829.189304] env[69475]: INFO nova.compute.manager [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Took 1.13 seconds to destroy the instance on the hypervisor. [ 829.189378] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.189572] env[69475]: DEBUG nova.compute.manager [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 829.189665] env[69475]: DEBUG nova.network.neutron [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 829.253393] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 829.297060] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updated VIF entry in instance network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.297938] env[69475]: DEBUG nova.network.neutron [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [{"id": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "address": "fa:16:3e:cf:c8:34", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c1a160-24", "ovs_interfaceid": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.307404] env[69475]: DEBUG nova.network.neutron [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Updating instance_info_cache with network_info: [{"id": "7ca686e9-6693-4490-aabc-712796a8fe04", "address": "fa:16:3e:d3:5c:f8", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca686e9-66", "ovs_interfaceid": "7ca686e9-6693-4490-aabc-712796a8fe04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.311167] env[69475]: INFO nova.compute.manager [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Took 44.85 seconds to build instance. 
[ 829.334009] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e3a8b9-8c7e-4949-9a0c-9cdefc692f43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.342980] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed840388-afa8-4d08-98df-f2bceae99558 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.381148] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cde07c-8120-4a23-8da2-815e07555dee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.389723] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ece2062-cb14-4601-ac54-7d90e79203e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.404766] env[69475]: DEBUG nova.compute.provider_tree [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.657897] env[69475]: DEBUG oslo_vmware.api [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508155, 'name': ReconfigVM_Task, 'duration_secs': 0.233831} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.658407] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700995', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'name': 'volume-df29d466-8554-4613-aa48-1be45554c1b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e960f967-d693-4ea8-9390-8b0232941c58', 'attached_at': '', 'detached_at': '', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'serial': 'df29d466-8554-4613-aa48-1be45554c1b5'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 829.788946] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.796020] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Received event network-vif-plugged-7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.796020] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Acquiring lock "86647493-8b2c-46bd-94d3-c973e843f778-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 829.796214] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Lock "86647493-8b2c-46bd-94d3-c973e843f778-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 829.796383] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Lock "86647493-8b2c-46bd-94d3-c973e843f778-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.796548] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] No waiting events found dispatching network-vif-plugged-7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 829.796708] env[69475]: WARNING nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Received unexpected event 
network-vif-plugged-7ca686e9-6693-4490-aabc-712796a8fe04 for instance with vm_state building and task_state spawning. [ 829.796862] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Received event network-changed-7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 829.797020] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Refreshing instance network info cache due to event network-changed-7ca686e9-6693-4490-aabc-712796a8fe04. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 829.797188] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Acquiring lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 829.804601] env[69475]: DEBUG oslo_concurrency.lockutils [req-8507d262-756b-4153-9f9d-52097995c85f req-65d2a86c-3955-4172-9c73-6b08847d6f2d service nova] Releasing lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.811509] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.811725] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Instance network_info: |[{"id": "7ca686e9-6693-4490-aabc-712796a8fe04", "address": "fa:16:3e:d3:5c:f8", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca686e9-66", "ovs_interfaceid": "7ca686e9-6693-4490-aabc-712796a8fe04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 829.812015] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] 
Acquired lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 829.812210] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Refreshing network info cache for port 7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 829.814129] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:5c:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ca686e9-6693-4490-aabc-712796a8fe04', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 829.822210] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating folder: Project (ef7fb53bce6145da8fe1e2f8beb57807). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.823928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-229ccad0-86cd-41b6-bedd-ca2c4f3cf1d2 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.635s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.824152] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5d42211-12a5-47ff-bf2d-bd3966d14de2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.827219] env[69475]: DEBUG nova.network.neutron [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.837684] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created folder: Project (ef7fb53bce6145da8fe1e2f8beb57807) in parent group-v700823. [ 829.837890] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating folder: Instances. Parent ref: group-v700996. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 829.838141] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ba3c0016-29f1-44fa-80d6-b62581ba41eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.848446] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created folder: Instances in parent group-v700996. [ 829.848586] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 829.848669] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 829.849151] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e0e7ec1-8430-4f3f-9054-33b10ed207b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.868124] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 829.868124] env[69475]: value = "task-3508158" [ 829.868124] env[69475]: _type = "Task" [ 829.868124] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.876117] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508158, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.928583] env[69475]: ERROR nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] [req-7b98f2f1-3ba2-4383-94ec-40dbcfd27a72] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7b98f2f1-3ba2-4383-94ec-40dbcfd27a72"}]} [ 829.950209] env[69475]: DEBUG nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 829.965178] env[69475]: DEBUG nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 829.965405] env[69475]: DEBUG nova.compute.provider_tree [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 829.980205] env[69475]: DEBUG nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 830.006211] env[69475]: DEBUG nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 830.155447] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 830.164301] env[69475]: DEBUG nova.network.neutron [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.195214] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 830.195465] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 830.195637] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 830.195822] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 830.195967] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 830.199256] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 830.199488] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 830.199655] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 
tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 830.199832] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 830.200046] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 830.200214] env[69475]: DEBUG nova.virt.hardware [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 830.201444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d051989e-0df9-4e69-a2a9-09b0bac49e68 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.218317] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c0dafb-885f-47ab-985c-f1649df4917a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.331043] env[69475]: INFO nova.compute.manager [-] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Took 1.57 seconds to deallocate network for instance. [ 830.386960] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508158, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.541829] env[69475]: DEBUG nova.compute.manager [req-291bbdae-ddb9-4d98-af0a-86dc6b1dfb7d req-4ddbe5fd-46b0-479a-a166-b6aa15e63b7c service nova] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Received event network-vif-deleted-4bac6b08-29dc-45f8-bd32-4adb28c6ea48 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.553061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.553994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.553994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.553994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.553994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.556542] env[69475]: INFO nova.compute.manager [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Terminating instance [ 830.579759] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Updated VIF entry in instance network info cache for port 7ca686e9-6693-4490-aabc-712796a8fe04. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 830.580255] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Updating instance_info_cache with network_info: [{"id": "7ca686e9-6693-4490-aabc-712796a8fe04", "address": "fa:16:3e:d3:5c:f8", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca686e9-66", "ovs_interfaceid": "7ca686e9-6693-4490-aabc-712796a8fe04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.668442] env[69475]: INFO nova.compute.manager [-] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Took 1.48 seconds to deallocate network for instance. [ 830.732857] env[69475]: DEBUG nova.objects.instance [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'flavor' on Instance uuid e960f967-d693-4ea8-9390-8b0232941c58 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 830.766524] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e15e80c3-c759-4d13-b34d-c875d6ddb6c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.775293] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-661fe67b-4f57-4f51-859b-ec4205656b6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.807680] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5d0b33-035d-49c8-bce1-7771f5d30045 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.815256] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0782ca6-c38d-443c-8ad9-24ebeca80062 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.829076] env[69475]: DEBUG nova.compute.provider_tree [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 830.836639] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.878783] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508158, 'name': CreateVM_Task, 'duration_secs': 0.822813} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.878947] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 830.880286] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.880463] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.880791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 830.881099] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc2c32e3-e8ec-4f7b-974a-32f6a88a1f32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.885352] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 830.885352] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525218c7-babd-027c-eab6-b774fef94dfc" [ 830.885352] env[69475]: _type = "Task" [ 830.885352] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.893647] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525218c7-babd-027c-eab6-b774fef94dfc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.966330] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Successfully updated port: 30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 831.062602] env[69475]: DEBUG nova.compute.manager [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 831.062826] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 831.064225] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-857c363d-b4dc-49c0-ad2e-4c41323a6d00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.073173] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.073448] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53bceb4d-ebbf-4363-b904-daf38b44c0dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.080249] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 831.080249] env[69475]: value = "task-3508159" [ 831.080249] env[69475]: _type = "Task" [ 831.080249] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.084332] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Releasing lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.084968] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 831.084968] env[69475]: DEBUG nova.compute.manager [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing instance network info cache due to event network-changed-60c1a160-2445-460f-a1ab-ee86bd91a07c. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 831.085214] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Acquiring lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.085478] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Acquired lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.085663] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Refreshing network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.094453] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508159, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.178687] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 831.238053] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a390034f-32b9-434a-b454-186fc537bcec tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.896s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.366897] env[69475]: DEBUG nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 87 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 831.367687] env[69475]: DEBUG nova.compute.provider_tree [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 87 to 88 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 831.367687] env[69475]: DEBUG nova.compute.provider_tree [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 831.402403] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525218c7-babd-027c-eab6-b774fef94dfc, 'name': SearchDatastore_Task, 'duration_secs': 0.011492} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.403121] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.403660] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 831.404042] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.404201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.406265] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 831.406265] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f11d91d-8bb8-4590-863e-f1fe84744e34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.427719] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 831.427937] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 831.428706] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ae18d03-fbe6-47bd-bb9c-e14046d32166 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.436982] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 831.436982] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52340e91-bb8b-6182-6fc0-005d8cf05d57" [ 831.436982] env[69475]: _type = "Task" [ 831.436982] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.445742] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52340e91-bb8b-6182-6fc0-005d8cf05d57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.472423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 831.472607] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.472831] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 831.595087] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508159, 'name': PowerOffVM_Task, 'duration_secs': 0.219662} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.595768] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.595827] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 831.596761] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb67875a-4991-40f9-b0f6-8ea14a6f1201 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.644640] env[69475]: INFO nova.compute.manager [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Rebuilding instance [ 831.668926] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 831.669490] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 831.670690] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleting the datastore file [datastore2] 5e3e57c5-8367-493f-8268-a0e496c8c878 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 831.670762] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aed81f45-d82e-452e-a9ae-80e1ae444a21 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.682200] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 831.682200] env[69475]: value = "task-3508161" [ 831.682200] env[69475]: _type = "Task" [ 831.682200] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.697268] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508161, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.708731] env[69475]: DEBUG nova.compute.manager [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 831.710153] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f882b836-7601-496b-9d05-f56185713f31 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.874214] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.751s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.880855] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.829s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.881929] env[69475]: DEBUG nova.objects.instance [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lazy-loading 'resources' on Instance uuid 420ecc09-60c8-4a14-8504-d11d760ddbb4 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 831.906256] env[69475]: INFO nova.scheduler.client.report [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted allocations for instance 3fba85c9-7798-4a66-b335-21f80962e0bd [ 831.949647] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52340e91-bb8b-6182-6fc0-005d8cf05d57, 'name': SearchDatastore_Task, 'duration_secs': 0.012457} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.950646] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62f6de55-ec65-4fdf-8406-d89c8b54e4f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.958462] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 831.958462] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521747d9-000c-5247-b166-122617154c9a" [ 831.958462] env[69475]: _type = "Task" [ 831.958462] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.968847] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521747d9-000c-5247-b166-122617154c9a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.024208] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 832.088580] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updated VIF entry in instance network info cache for port 60c1a160-2445-460f-a1ab-ee86bd91a07c. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.088928] env[69475]: DEBUG nova.network.neutron [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [{"id": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "address": "fa:16:3e:cf:c8:34", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60c1a160-24", "ovs_interfaceid": "60c1a160-2445-460f-a1ab-ee86bd91a07c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.187237] env[69475]: DEBUG nova.network.neutron [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.193784] env[69475]: DEBUG oslo_vmware.api [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508161, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325292} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.194366] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 832.194366] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 832.194463] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 832.194551] env[69475]: INFO nova.compute.manager [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 1.13 seconds to destroy the instance on the hypervisor. [ 832.194812] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.195020] env[69475]: DEBUG nova.compute.manager [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 832.195137] env[69475]: DEBUG nova.network.neutron [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 832.319654] env[69475]: DEBUG nova.compute.manager [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Received event network-vif-deleted-6c87b79b-ed3d-448d-a02d-1004956a1d8d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.319997] env[69475]: DEBUG nova.compute.manager [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.320310] env[69475]: DEBUG nova.compute.manager [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing instance network info cache due to event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 832.320548] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Acquiring lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.320675] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Acquired lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.320833] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.418445] env[69475]: DEBUG oslo_concurrency.lockutils [None req-785701ec-35cb-4feb-af8d-cd1604ce81da tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "3fba85c9-7798-4a66-b335-21f80962e0bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.888s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.470629] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521747d9-000c-5247-b166-122617154c9a, 'name': SearchDatastore_Task, 'duration_secs': 0.01176} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.470629] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.470629] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/86647493-8b2c-46bd-94d3-c973e843f778.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 832.470767] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ec8d5c3-993a-4d19-92a6-0e3722372d7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.478621] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 832.478621] env[69475]: value = "task-3508162" [ 832.478621] env[69475]: _type = "Task" [ 832.478621] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.485273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.486557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.486557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "e960f967-d693-4ea8-9390-8b0232941c58-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.486557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.486557] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.487902] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508162, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.490131] env[69475]: INFO nova.compute.manager [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Terminating instance [ 832.591068] env[69475]: DEBUG oslo_concurrency.lockutils [req-64199eb1-1ac4-4f4a-a5a4-bcefbd15b97b req-23d43afd-483d-4da6-90f4-d4a54573e3dd service nova] Releasing lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.689861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.694280] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Instance network_info: |[{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 832.695080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 
tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:ea:cb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30f10284-138a-4774-b024-33ffa906ef81', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 832.702827] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating folder: Project (b2ba1a4125454d39bc92b6123447d98a). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.703291] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1765cc8-62c8-4b12-836b-2a17fc0a2727 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.717514] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created folder: Project (b2ba1a4125454d39bc92b6123447d98a) in parent group-v700823. [ 832.717729] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating folder: Instances. Parent ref: group-v700999. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 832.717989] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dacfaa0e-123e-4a39-a142-2028e3a33813 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.724895] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.725191] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e022456b-ff89-4be9-af2d-0a4e1871e695 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.731464] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created folder: Instances in parent group-v700999. [ 832.731717] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 832.733025] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 832.733389] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 832.733389] env[69475]: value = "task-3508165" [ 832.733389] env[69475]: _type = "Task" [ 832.733389] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.733586] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bccf0df4-b470-406f-bf3b-1fe4535a2637 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.761348] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.762835] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 832.762835] env[69475]: value = "task-3508166" [ 832.762835] env[69475]: _type = "Task" [ 832.762835] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.778195] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508166, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.917704] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-vif-plugged-30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.917939] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 832.918181] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 832.918753] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.918753] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] No waiting events found dispatching network-vif-plugged-30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 832.918753] env[69475]: WARNING nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received unexpected event network-vif-plugged-30f10284-138a-4774-b024-33ffa906ef81 for instance with vm_state building and task_state spawning. [ 832.918930] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-changed-30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 832.919025] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing instance network info cache due to event network-changed-30f10284-138a-4774-b024-33ffa906ef81. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 832.919167] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.919290] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.919446] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing network info cache for port 30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 832.937784] env[69475]: DEBUG nova.network.neutron [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.943440] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1010b716-d81b-48bf-b682-a8b39f5792f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.959629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d324774-e3dc-4984-9daa-cb4f8beeea33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.999614] env[69475]: DEBUG nova.compute.manager [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 833.000093] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 833.007543] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e340a477-b4ae-4d4b-8ad5-e20d6d9a2b22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.009503] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d322e78-1089-412d-a917-41f4fe4326f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.018549] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508162, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.022506] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 833.022506] env[69475]: value = "task-3508167" [ 833.022506] env[69475]: _type = "Task" [ 833.022506] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.023894] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41bbbfce-4815-46bd-b4b9-5ba37ba58258 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.043184] env[69475]: DEBUG nova.compute.provider_tree [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.050170] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508167, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.151271] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updated VIF entry in instance network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.151651] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [{"id": "ed004f95-f0d0-434e-a13d-54bff688d74e", "address": "fa:16:3e:3d:0c:05", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped004f95-f0", "ovs_interfaceid": "ed004f95-f0d0-434e-a13d-54bff688d74e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.260113] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508165, 'name': PowerOffVM_Task, 'duration_secs': 0.316919} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.260419] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.261146] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 833.262229] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d5f03c6-83ed-4442-a62a-b796a62194eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.273996] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 833.279431] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d56a5303-a0c6-4045-9317-a2b690e3bafd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.281595] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508166, 'name': CreateVM_Task, 'duration_secs': 0.482858} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.281813] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.283784] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.283961] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.284298] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.284542] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d370ba-5e7b-4906-8dab-0890fb72437d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.292157] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 833.292157] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d39700-452b-92ed-fbbf-cbc26b6d4c86" [ 833.292157] env[69475]: _type = "Task" [ 833.292157] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.302203] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d39700-452b-92ed-fbbf-cbc26b6d4c86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.350173] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 833.350173] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 833.350173] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore1] 235653ac-a893-4f42-a394-dd81f61f0d73 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 833.350173] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-051c9bc4-5bc2-443c-9ce2-3209cfb1779a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.356630] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 833.356630] env[69475]: value = "task-3508169" [ 833.356630] env[69475]: _type = "Task" [ 833.356630] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.366528] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508169, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.440694] env[69475]: INFO nova.compute.manager [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 1.25 seconds to deallocate network for instance. [ 833.509735] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508162, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550494} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.510024] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/86647493-8b2c-46bd-94d3-c973e843f778.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 833.510243] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 833.510491] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c70b2648-cf6b-4fdc-9ece-37ca1cc2c202 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.516054] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 833.516054] env[69475]: value = "task-3508170" [ 833.516054] env[69475]: _type = "Task" [ 833.516054] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.523869] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508170, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.532542] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508167, 'name': PowerOffVM_Task, 'duration_secs': 0.215313} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.534987] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 833.535861] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 833.535861] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700995', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'name': 'volume-df29d466-8554-4613-aa48-1be45554c1b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e960f967-d693-4ea8-9390-8b0232941c58', 'attached_at': '', 'detached_at': '', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'serial': 'df29d466-8554-4613-aa48-1be45554c1b5'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 833.536181] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f478d77e-c128-47e9-9487-c8c265e63cae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.557398] env[69475]: DEBUG nova.scheduler.client.report [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 833.564146] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faad6dd3-1d09-4a50-9ad5-f60ff4320df2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.577883] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91d8da35-0d4c-42ea-bcf2-69bdf6433ebb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.598864] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a11776c-8a9e-4447-bcee-cf09907bbbab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.615768] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] The volume has not been displaced from its original location: [datastore1] volume-df29d466-8554-4613-aa48-1be45554c1b5/volume-df29d466-8554-4613-aa48-1be45554c1b5.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 833.622315] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfiguring VM instance instance-00000037 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 833.623607] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c638d281-242a-48ac-a385-49f799d6ef45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.643195] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 833.643195] env[69475]: value = "task-3508171" [ 833.643195] env[69475]: _type = "Task" [ 833.643195] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.651222] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508171, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.654805] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Releasing lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.655058] env[69475]: DEBUG nova.compute.manager [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 833.655258] env[69475]: DEBUG nova.compute.manager [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing instance network info cache due to event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 833.655473] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.655614] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.655773] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 833.671694] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updated VIF entry in instance network info cache for port 30f10284-138a-4774-b024-33ffa906ef81. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 833.672072] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 833.803640] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d39700-452b-92ed-fbbf-cbc26b6d4c86, 'name': SearchDatastore_Task, 'duration_secs': 0.010119} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.803949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.804464] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.804549] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.804690] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.804917] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.805247] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c1636ddf-e768-4c28-9e02-e5d6121ebbdc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.813645] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.813645] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.814361] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e95b7056-b4a2-4c7f-8c47-147542a00c5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.820033] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 833.820033] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243796d-a684-6d9b-5050-42c15494453d" [ 833.820033] env[69475]: _type = "Task" [ 833.820033] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.828335] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243796d-a684-6d9b-5050-42c15494453d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.867147] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508169, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146121} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.867399] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 833.867579] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 833.867754] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 833.948268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.026646] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508170, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070525} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.027258] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 834.027719] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bdceaad-b35b-416a-8b30-0b4eade231e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.051501] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/86647493-8b2c-46bd-94d3-c973e843f778.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.051501] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9902eec6-b613-488b-a6ec-e2aa155cedde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.069273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.072819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.515s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.073082] env[69475]: DEBUG nova.objects.instance [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lazy-loading 'resources' on Instance uuid 25c44ae0-4193-4833-85ec-ebc0ef3cf593 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 834.074521] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.074737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.074933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.075174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.075352] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.076899] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 834.076899] env[69475]: value = "task-3508172" [ 834.076899] env[69475]: _type = "Task" [ 834.076899] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.077343] env[69475]: INFO nova.compute.manager [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Terminating instance [ 834.089092] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.099022] env[69475]: INFO nova.scheduler.client.report [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Deleted allocations for instance 420ecc09-60c8-4a14-8504-d11d760ddbb4 [ 834.153098] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508171, 'name': ReconfigVM_Task, 'duration_secs': 0.225862} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.154166] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Reconfigured VM instance instance-00000037 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 834.161103] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-81f0577f-5c69-4a7f-94db-353382695c03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.174455] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.174699] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Received event network-changed-3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.174863] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Refreshing instance network info cache due to event network-changed-3041b80e-1b4f-454f-92b6-d002b52423b5. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.175079] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Acquiring lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.175301] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Acquired lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.175404] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Refreshing network info cache for port 3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.178536] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 834.178536] env[69475]: value = "task-3508173" [ 834.178536] env[69475]: _type = "Task" [ 834.178536] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.187880] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508173, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.329712] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243796d-a684-6d9b-5050-42c15494453d, 'name': SearchDatastore_Task, 'duration_secs': 0.010397} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.330702] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85a06ea1-0366-438c-a6e8-86c540b42270 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.337926] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 834.337926] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c3659-923b-e696-993f-0eae32e6e94e" [ 834.337926] env[69475]: _type = "Task" [ 834.337926] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.345556] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c3659-923b-e696-993f-0eae32e6e94e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.347851] env[69475]: DEBUG nova.compute.manager [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.347943] env[69475]: DEBUG nova.compute.manager [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing instance network info cache due to event network-changed-5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 834.348777] env[69475]: DEBUG oslo_concurrency.lockutils [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] Acquiring lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.348777] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.370428] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updated VIF entry in instance network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.370790] env[69475]: DEBUG nova.network.neutron [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.586341] env[69475]: DEBUG nova.compute.manager [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 834.586594] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 834.586859] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7813f23-5a01-4779-84c6-60496ea7faea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.596272] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508172, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.596272] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 834.596272] env[69475]: value = "task-3508174" [ 834.596272] env[69475]: _type = "Task" [ 834.596272] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.605190] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.610489] env[69475]: DEBUG oslo_concurrency.lockutils [None req-50701eea-e2df-4670-b967-0ded1e78c55a tempest-MultipleCreateTestJSON-852249463 tempest-MultipleCreateTestJSON-852249463-project-member] Lock "420ecc09-60c8-4a14-8504-d11d760ddbb4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.207s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.692983] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508173, 'name': ReconfigVM_Task, 'duration_secs': 0.183727} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.693349] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700995', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'name': 'volume-df29d466-8554-4613-aa48-1be45554c1b5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e960f967-d693-4ea8-9390-8b0232941c58', 'attached_at': '', 'detached_at': '', 'volume_id': 'df29d466-8554-4613-aa48-1be45554c1b5', 'serial': 'df29d466-8554-4613-aa48-1be45554c1b5'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 834.693655] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.694447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1409695b-e932-4293-b66b-f45054e59eee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.701527] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 834.701766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3931f759-aca2-41a9-b218-d054e30b0efd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.763151] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 834.763309] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 834.763501] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore1] e960f967-d693-4ea8-9390-8b0232941c58 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 834.763757] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a912af15-1f3a-46a6-bed7-ff31ea48d3e6 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.770622] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 834.770622] env[69475]: value = "task-3508176" [ 834.770622] env[69475]: _type = "Task" [ 834.770622] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.778842] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508176, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.850420] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c3659-923b-e696-993f-0eae32e6e94e, 'name': SearchDatastore_Task, 'duration_secs': 0.014703} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.853043] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.853408] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0/f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.853868] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9d5d7cc-5231-45ec-8abc-49e6bb25ea43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.860246] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 834.860246] env[69475]: value = "task-3508177" [ 834.860246] env[69475]: _type = "Task" [ 834.860246] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.868284] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508177, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.877209] env[69475]: DEBUG oslo_concurrency.lockutils [req-ec131bbd-f85a-46eb-a7a0-cbf68a7d87ca req-46864756-928e-4717-a892-bdd194698d19 service nova] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.880706] env[69475]: DEBUG oslo_concurrency.lockutils [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] Acquired lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 834.881189] env[69475]: DEBUG nova.network.neutron [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Refreshing network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 834.912338] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.912640] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 834.912840] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 834.913097] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 834.913296] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 834.913522] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 
tempest-ServersAdminTestJSON-573324530-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 834.913761] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 834.913950] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 834.914191] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 834.914408] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 834.914606] env[69475]: DEBUG nova.virt.hardware [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 834.915899] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5282a11f-6994-4e79-b05f-9ac6e70db619 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.928702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d4de88-4f66-451e-b914-6269a66d0565 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.943449] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:13:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '31ac3fea-ebf4-4bed-bf70-1eaecdf71280', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e51856c-7355-448c-82fc-e5af23bb0fcf', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 834.951050] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 834.953696] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 834.954121] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d54a55e-3cb7-483c-b41f-1e4d1955ce7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.971259] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updated VIF entry in instance network info cache for port 3041b80e-1b4f-454f-92b6-d002b52423b5. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 834.971657] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updating instance_info_cache with network_info: [{"id": "3041b80e-1b4f-454f-92b6-d002b52423b5", "address": "fa:16:3e:8f:51:41", "network": {"id": "fdf12f35-f15a-4e19-8404-a57b06812497", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-417366095-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.220", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5d454d98dea429da9c2cc9300ed9573", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "411f389f-4e4f-4450-891e-38944cac6135", "external-id": "nsx-vlan-transportzone-795", "segmentation_id": 795, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3041b80e-1b", "ovs_interfaceid": "3041b80e-1b4f-454f-92b6-d002b52423b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.979824] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 834.979824] env[69475]: value = "task-3508178" [ 834.979824] env[69475]: _type = "Task" [ 834.979824] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.986264] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508178, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.097583] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508172, 'name': ReconfigVM_Task, 'duration_secs': 0.595369} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.102190] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/86647493-8b2c-46bd-94d3-c973e843f778.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 835.102190] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a05d62e4-831d-4d35-8e36-657dbad55b71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.109740] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cad3cd1-7a9a-4a31-b1d9-5b9354e81aaf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.118280] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508174, 'name': PowerOffVM_Task, 'duration_secs': 0.246843} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.118607] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 835.118607] env[69475]: value = "task-3508179" [ 835.118607] env[69475]: _type = "Task" [ 835.118607] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.119362] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 835.119600] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 835.120519] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700903', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'name': 'volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bea34ef-0caf-4cdb-a689-dd747d9b52ea', 'attached_at': '', 'detached_at': '', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'serial': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 835.121046] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c60282-bad0-49cd-a763-d695f467ae14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.132480] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cebcc8-52c1-4dda-98d8-689395c8f863 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.152779] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508179, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.154113] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f76f3e4-b8d5-41c4-bdfc-98fcf4d4240b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.189551] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7ae051-39ed-4a31-9752-ce681c360f3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.196967] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd65594-4d32-42b1-ad28-53c725cec378 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.205099] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf314ef-1505-42c7-8af3-549bd40840df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.232104] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9413a8e8-6569-4a32-a28e-a767633074e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.245675] env[69475]: DEBUG nova.compute.provider_tree [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.264172] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] The volume has not been displaced from its original location: [datastore1] volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d/volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d.vmdk. No consolidation needed. {{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 835.269779] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Reconfiguring VM instance instance-0000002f to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 835.270887] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c5bf539-13aa-433b-a062-d98fd2f5d938 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.294104] env[69475]: DEBUG oslo_vmware.api [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508176, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162939} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.295578] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 835.295788] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 835.295995] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 835.296207] env[69475]: INFO nova.compute.manager [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Took 2.30 seconds to destroy the instance on the hypervisor. [ 835.296455] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 835.296743] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 835.296743] env[69475]: value = "task-3508180" [ 835.296743] env[69475]: _type = "Task" [ 835.296743] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.296946] env[69475]: DEBUG nova.compute.manager [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 835.297059] env[69475]: DEBUG nova.network.neutron [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 835.311571] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508180, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.384037] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479993} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.384037] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0/f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.384037] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.384037] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3c70c1ee-68f5-48e5-841d-a6635891f3e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.390868] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 835.390868] env[69475]: value = "task-3508181" [ 835.390868] env[69475]: _type = "Task" [ 835.390868] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.402743] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508181, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.476256] env[69475]: DEBUG oslo_concurrency.lockutils [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] Releasing lock "refresh_cache-8cc0636c-84af-4f68-bec8-1493b421a605" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 835.476256] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Received event network-vif-deleted-60c1a160-2445-460f-a1ab-ee86bd91a07c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 835.476256] env[69475]: INFO nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Neutron deleted interface 60c1a160-2445-460f-a1ab-ee86bd91a07c; detaching it from the instance and deleting it from the info cache [ 835.476256] env[69475]: DEBUG nova.network.neutron [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.489771] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508178, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.630392] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508179, 'name': Rename_Task, 'duration_secs': 0.196601} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.630694] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.630898] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-17880443-f69c-42b5-8735-03bdcb35b3d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.637696] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 835.637696] env[69475]: value = "task-3508182" [ 835.637696] env[69475]: _type = "Task" [ 835.637696] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.646345] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508182, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.751393] env[69475]: DEBUG nova.scheduler.client.report [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 835.809114] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508180, 'name': ReconfigVM_Task, 'duration_secs': 0.174938} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.809374] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Reconfigured VM instance instance-0000002f to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 835.814393] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec8952a2-cd8e-4727-97bb-1a2b764644a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.835380] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 835.835380] env[69475]: value = "task-3508183" [ 835.835380] env[69475]: _type = "Task" [ 835.835380] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.846476] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.868619] env[69475]: DEBUG nova.network.neutron [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updated VIF entry in instance network info cache for port 5abe617e-d18a-416f-8c40-d0da33a563d2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 835.868985] env[69475]: DEBUG nova.network.neutron [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [{"id": "5abe617e-d18a-416f-8c40-d0da33a563d2", "address": "fa:16:3e:8a:cb:f9", "network": {"id": "4ef94f8a-061f-46ae-9a81-2ea0b3db007a", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1623741139-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.2", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.3"}}], "meta": {"injected": false, "tenant_id": "6fe52710b9d1461ea46698c9cf7bafb2", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ad36dd36-1d2c-4f37-a259-98ef2e440794", "external-id": "nsx-vlan-transportzone-479", "segmentation_id": 479, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5abe617e-d1", "ovs_interfaceid": "5abe617e-d18a-416f-8c40-d0da33a563d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.899726] env[69475]: DEBUG nova.compute.manager [req-9ecea2b0-309c-4e7a-a2b1-fafd48d65274 req-f77a78fb-e6bf-4580-b19f-4eac221efb85 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Received event network-vif-deleted-9a80c54f-962d-4eb2-a41a-ff95882d56a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 835.899967] env[69475]: INFO nova.compute.manager [req-9ecea2b0-309c-4e7a-a2b1-fafd48d65274 req-f77a78fb-e6bf-4580-b19f-4eac221efb85 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Neutron deleted interface 9a80c54f-962d-4eb2-a41a-ff95882d56a3; detaching it from the instance and deleting it from the info cache [ 835.900136] env[69475]: DEBUG nova.network.neutron [req-9ecea2b0-309c-4e7a-a2b1-fafd48d65274 req-f77a78fb-e6bf-4580-b19f-4eac221efb85 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.908778] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508181, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068032} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.909050] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 835.909869] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8397345-88c8-46d4-b548-424ea0a663dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.935347] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfiguring VM instance instance-0000003c to attach disk [datastore2] f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0/f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 835.935954] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-741a8412-1fa9-4b17-8b5b-962feeb2aa63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.956194] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 835.956194] env[69475]: value = "task-3508184" [ 835.956194] env[69475]: _type = "Task" [ 835.956194] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.964289] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508184, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.981514] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-174a8e43-b282-4d59-851e-5fe5bad02502 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.992294] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508178, 'name': CreateVM_Task, 'duration_secs': 0.514011} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.993503] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 835.994426] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.994600] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.994943] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 835.999094] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60da4d42-056c-4b9f-9a7d-64af9ee80751 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.013515] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7acd429c-1550-417f-a343-a1698d4f1c27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.019494] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 836.019494] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e88271-294f-18e4-d356-cc67c7e7ed10" [ 836.019494] env[69475]: _type = "Task" [ 836.019494] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.028429] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e88271-294f-18e4-d356-cc67c7e7ed10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.041548] env[69475]: DEBUG nova.compute.manager [req-0b191b78-a641-4cfc-9608-895229a4846e req-981b0030-ebca-468e-a76c-1af3127810a3 service nova] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Detach interface failed, port_id=60c1a160-2445-460f-a1ab-ee86bd91a07c, reason: Instance 5e3e57c5-8367-493f-8268-a0e496c8c878 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 836.147645] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508182, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.257462] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.185s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.259916] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 34.567s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.261305] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.262617] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 836.262617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.827s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 836.262617] env[69475]: DEBUG nova.objects.instance [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lazy-loading 'resources' on Instance uuid 41c23568-c8d7-4d6c-8cc4-a94c95b3223a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 836.264456] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3917b8fa-2a0d-478f-9153-e668d4065bc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.274346] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a488f468-6d1a-474e-ba5c-68c9e3e15bc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.290104] env[69475]: INFO nova.scheduler.client.report [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleted allocations for instance 
25c44ae0-4193-4833-85ec-ebc0ef3cf593 [ 836.291525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd9dfe7-5007-4ffc-a88e-45b592962733 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.307696] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aae01f6-d0bd-439a-a75f-ba2353ee2aaa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.343932] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178328MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 836.344020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.354171] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508183, 'name': ReconfigVM_Task, 'duration_secs': 0.223046} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.354171] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700903', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'name': 'volume-f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8bea34ef-0caf-4cdb-a689-dd747d9b52ea', 'attached_at': '', 'detached_at': '', 'volume_id': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d', 'serial': 'f89046dd-6d18-4fc2-bdc5-f7976aa2861d'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 836.354340] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.355016] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05da7072-34a6-4bcb-bb2b-d5c7596971df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.362744] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Unregistering the VM {{(pid=69475) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 836.362744] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de63bb02-5996-4996-99b2-542f802eab28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.371840] env[69475]: DEBUG oslo_concurrency.lockutils [req-597e452b-5c4d-4940-95b7-f6c9e620c5e6 req-c8650a14-3caf-4fce-aa6b-a8fd7f5982b9 service nova] Releasing lock "refresh_cache-b41845c6-46bd-4b3b-ab26-d7d2dad08f84" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.383686] env[69475]: DEBUG nova.network.neutron [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.403481] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00facc05-da67-4aaa-b3d6-27dca056813b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.413757] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35b840f8-ad5f-4a73-b898-7753b4fce434 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.431017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 836.431017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 836.431017] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Deleting the datastore file [datastore1] 8bea34ef-0caf-4cdb-a689-dd747d9b52ea {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 836.431017] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99761001-03d3-4c2d-b4d8-8d84024760ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.434464] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for the task: (returnval){ [ 836.434464] env[69475]: value = "task-3508186" [ 836.434464] env[69475]: _type = "Task" [ 836.434464] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.452452] env[69475]: DEBUG nova.compute.manager [req-9ecea2b0-309c-4e7a-a2b1-fafd48d65274 req-f77a78fb-e6bf-4580-b19f-4eac221efb85 service nova] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Detach interface failed, port_id=9a80c54f-962d-4eb2-a41a-ff95882d56a3, reason: Instance e960f967-d693-4ea8-9390-8b0232941c58 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 836.457831] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508186, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.466633] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508184, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.530596] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e88271-294f-18e4-d356-cc67c7e7ed10, 'name': SearchDatastore_Task, 'duration_secs': 0.009061} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.530943] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.531217] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 836.531486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.531636] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.531836] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 
tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 836.532154] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-486222f4-6f8d-417b-92fe-233da1d92e43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.540988] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 836.541197] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 836.542030] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f3f4881-914c-42c9-a990-3578dd198394 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.547893] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 836.547893] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52458262-cdff-1f74-593e-cf6d7dfefc6e" [ 836.547893] env[69475]: _type = "Task" [ 836.547893] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.557214] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52458262-cdff-1f74-593e-cf6d7dfefc6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.647526] env[69475]: DEBUG oslo_vmware.api [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508182, 'name': PowerOnVM_Task, 'duration_secs': 0.982184} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.647864] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.648049] env[69475]: INFO nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Took 10.45 seconds to spawn the instance on the hypervisor. 
[ 836.648249] env[69475]: DEBUG nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.649054] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8365d8ab-4c88-4c60-bc49-3b1102a711a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.810440] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b32c67f0-2614-41dd-9295-b79db93626ae tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "25c44ae0-4193-4833-85ec-ebc0ef3cf593" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.704s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.885630] env[69475]: INFO nova.compute.manager [-] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Took 1.59 seconds to deallocate network for instance. [ 836.952688] env[69475]: DEBUG oslo_vmware.api [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Task: {'id': task-3508186, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125062} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.953506] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 836.953506] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 836.953506] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.953652] env[69475]: INFO nova.compute.manager [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Took 2.37 seconds to destroy the instance on the hypervisor. [ 836.954231] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.954231] env[69475]: DEBUG nova.compute.manager [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 836.954231] env[69475]: DEBUG nova.network.neutron [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 836.968256] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508184, 'name': ReconfigVM_Task, 'duration_secs': 0.802559} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.968538] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfigured VM instance instance-0000003c to attach disk [datastore2] f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0/f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.969333] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03001117-3d08-4a83-a7a8-b0f062538e10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.979085] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 836.979085] env[69475]: value = "task-3508187" [ 836.979085] env[69475]: _type = "Task" [ 836.979085] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.989825] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508187, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.064757] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52458262-cdff-1f74-593e-cf6d7dfefc6e, 'name': SearchDatastore_Task, 'duration_secs': 0.014769} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.065633] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3207dc14-da0c-4b52-9bb3-aafd0e6b0a81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.074938] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 837.074938] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0ca7-628a-0320-17c2-743dae1382ba" [ 837.074938] env[69475]: _type = "Task" [ 837.074938] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.083618] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0ca7-628a-0320-17c2-743dae1382ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.169979] env[69475]: INFO nova.compute.manager [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Took 41.33 seconds to build instance. [ 837.340145] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd58ce0-a92a-4051-8aaf-3ff9518417ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.350422] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519f6a8a-603a-4068-9070-b25130160db0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.388656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f937b2e-4c4b-498c-bd05-e11c491deb44 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.395974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7f24e7e-7c8b-43a9-b0de-2ecd71dd7c56 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.409613] env[69475]: DEBUG nova.compute.provider_tree [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.442641] env[69475]: INFO nova.compute.manager [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Took 0.56 seconds to detach 1 volumes for instance. 
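The wait_for_task / _poll_task entries above (for example Rename_Task task-3508187 and the SearchDatastore_Task session tasks) all follow the same pattern: oslo.vmware submits an asynchronous vCenter task, then polls its state and logs "progress is N%" until the task reports success or error. Below is a minimal, self-contained sketch of that polling loop; the helper name, the shape of the task-info object, and the timing values are assumptions for illustration only, not the actual oslo.vmware implementation.

# Simplified illustration of the task-polling pattern visible in the
# surrounding log entries (hypothetical helper, not oslo.vmware code).
import time

class TaskTimeoutError(Exception):
    """Raised when a task does not finish within the allowed time."""

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    get_task_info is assumed to return an object with .state
    ('queued', 'running', 'success' or 'error'), .progress (0-100)
    and .error attributes, mirroring what the _poll_task entries report.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == 'success':
            return info
        if info.state == 'error':
            raise RuntimeError(f"task failed: {info.error}")
        # Corresponds to the "progress is N%" lines logged between polls.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TaskTimeoutError("task did not complete in time")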
[ 837.494553] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508187, 'name': Rename_Task, 'duration_secs': 0.141584} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.494828] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.495091] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10013cbf-9bef-4839-a517-28e5ccdb125a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.501873] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 837.501873] env[69475]: value = "task-3508188" [ 837.501873] env[69475]: _type = "Task" [ 837.501873] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.510201] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508188, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.586481] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f0ca7-628a-0320-17c2-743dae1382ba, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.587190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.587190] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.587316] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12145ff6-76f3-47c3-a1d5-558d0d060e2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.593757] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 837.593757] env[69475]: value = "task-3508189" [ 837.593757] env[69475]: _type = "Task" [ 837.593757] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.602623] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508189, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.676037] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3263fe85-437d-48d2-82fa-d7dad7cdd128 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.426s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 837.831173] env[69475]: INFO nova.compute.manager [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Rescuing [ 837.834232] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.834232] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.834232] env[69475]: DEBUG nova.network.neutron [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 837.913457] env[69475]: DEBUG nova.scheduler.client.report [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 837.929335] env[69475]: DEBUG nova.compute.manager [req-1998ba32-9eef-4648-9f3f-399388866ddb req-cc8659b5-eb4e-474c-8959-f30b9c4244b5 service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Received event network-vif-deleted-eb7198c7-072e-4cfe-bfdb-5306e3098955 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 837.929335] env[69475]: INFO nova.compute.manager [req-1998ba32-9eef-4648-9f3f-399388866ddb req-cc8659b5-eb4e-474c-8959-f30b9c4244b5 service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Neutron deleted interface eb7198c7-072e-4cfe-bfdb-5306e3098955; detaching it from the instance and deleting it from the info cache [ 837.929335] env[69475]: DEBUG nova.network.neutron [req-1998ba32-9eef-4648-9f3f-399388866ddb req-cc8659b5-eb4e-474c-8959-f30b9c4244b5 service nova] [instance: 
8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.949096] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.013699] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508188, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.027456] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 838.027762] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.027954] env[69475]: DEBUG nova.compute.manager [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.028924] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fece2d81-0f18-4d33-b177-aa892b5eb2cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.038166] env[69475]: DEBUG nova.compute.manager [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 838.038774] env[69475]: DEBUG nova.objects.instance [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'flavor' on Instance uuid baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 838.064205] env[69475]: DEBUG nova.network.neutron [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.106300] env[69475]: DEBUG 
oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508189, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.418973] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.421329] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.938s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.422881] env[69475]: INFO nova.compute.claims [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.431416] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1788b1ae-1fb3-4434-b018-73d08865f986 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.440671] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce25621-d22b-4b5e-98f6-68419b79fef4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.452098] env[69475]: INFO nova.scheduler.client.report [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Deleted allocations for instance 41c23568-c8d7-4d6c-8cc4-a94c95b3223a [ 838.477290] env[69475]: DEBUG nova.compute.manager [req-1998ba32-9eef-4648-9f3f-399388866ddb req-cc8659b5-eb4e-474c-8959-f30b9c4244b5 service nova] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Detach interface failed, port_id=eb7198c7-072e-4cfe-bfdb-5306e3098955, reason: Instance 8bea34ef-0caf-4cdb-a689-dd747d9b52ea could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 838.513766] env[69475]: DEBUG oslo_vmware.api [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508188, 'name': PowerOnVM_Task, 'duration_secs': 0.76618} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.513766] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.513931] env[69475]: INFO nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Took 8.36 seconds to spawn the instance on the hypervisor. [ 838.513995] env[69475]: DEBUG nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.514800] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-787e6008-3d42-4eb8-9d2a-dd328ede6877 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.557415] env[69475]: DEBUG nova.network.neutron [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Updating instance_info_cache with network_info: [{"id": "7ca686e9-6693-4490-aabc-712796a8fe04", "address": "fa:16:3e:d3:5c:f8", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca686e9-66", "ovs_interfaceid": "7ca686e9-6693-4490-aabc-712796a8fe04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 838.566391] env[69475]: INFO nova.compute.manager [-] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Took 1.61 seconds to deallocate network for instance. [ 838.604599] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508189, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553046} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.604877] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 838.605216] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 838.609047] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bfcf245-3940-4d51-83ad-83491df2c9b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.611253] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 838.611253] env[69475]: value = "task-3508190" [ 838.611253] env[69475]: _type = "Task" [ 838.611253] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.621036] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508190, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.960786] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b64cadcd-9376-48bd-bdf7-7bac930acf0b tempest-InstanceActionsV221TestJSON-1686320254 tempest-InstanceActionsV221TestJSON-1686320254-project-member] Lock "41c23568-c8d7-4d6c-8cc4-a94c95b3223a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.164s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.032042] env[69475]: INFO nova.compute.manager [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Took 40.02 seconds to build instance. 
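The recurring lockutils lines in this log ("Acquiring lock ...", "Lock ... acquired ... :: waited", "Lock ... released ... :: held") come from oslo.concurrency's named locks, which Nova wraps around per-instance and per-resource critical sections such as do_terminate_instance and _locked_do_build_and_run_instance. The sketch below shows the common decorator form of that API with a hypothetical lock name; option defaults can differ between oslo.concurrency releases.

# Minimal sketch of the named-lock usage reflected in the
# acquired/waited/released/held entries (illustrative lock name).
from oslo_concurrency import lockutils

@lockutils.synchronized('instance-uuid-goes-here')
def do_terminate_instance():
    # Only one caller at a time may run this body for the given lock
    # name; the "waited" and "held" durations in the log measure how
    # long callers queued for and then held this gate.
    print("terminating instance while holding the lock")

if __name__ == '__main__':
    do_terminate_instance()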
[ 839.046161] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 839.046464] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64127df1-1e2e-4482-bb91-ac66201bf6f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.054215] env[69475]: DEBUG oslo_vmware.api [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 839.054215] env[69475]: value = "task-3508191" [ 839.054215] env[69475]: _type = "Task" [ 839.054215] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.062326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "refresh_cache-86647493-8b2c-46bd-94d3-c973e843f778" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 839.064684] env[69475]: DEBUG oslo_vmware.api [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508191, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.118346] env[69475]: INFO nova.compute.manager [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Took 0.55 seconds to detach 1 volumes for instance. [ 839.120475] env[69475]: DEBUG nova.compute.manager [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Deleting volume: f89046dd-6d18-4fc2-bdc5-f7976aa2861d {{(pid=69475) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 839.127768] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069872} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.128094] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.129210] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6ccaccf-6f86-46b9-a614-b342e11c6e46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.153171] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfiguring VM instance instance-00000010 to attach disk [datastore2] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.153891] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-66d872c2-05b1-4f80-a94d-08d6df57798b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.180834] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 839.180834] env[69475]: value = "task-3508192" [ 839.180834] env[69475]: _type = "Task" [ 839.180834] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.190346] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508192, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.534789] env[69475]: DEBUG oslo_concurrency.lockutils [None req-93b6a52e-9f3e-4f20-96b1-a590d9141206 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.567s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.566109] env[69475]: DEBUG oslo_vmware.api [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508191, 'name': PowerOffVM_Task, 'duration_secs': 0.213701} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.566377] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 839.566566] env[69475]: DEBUG nova.compute.manager [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 839.567436] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd868d3-45b1-42d9-9379-92757d180a6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.676794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 839.693346] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508192, 'name': ReconfigVM_Task, 'duration_secs': 0.29416} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.693796] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Reconfigured VM instance instance-00000010 to attach disk [datastore2] 235653ac-a893-4f42-a394-dd81f61f0d73/235653ac-a893-4f42-a394-dd81f61f0d73.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 839.694610] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31dbc08c-fd4f-418d-88dc-78b29f3c8207 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.704064] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 839.704064] env[69475]: value = "task-3508194" [ 839.704064] env[69475]: _type = "Task" [ 839.704064] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.713065] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508194, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.920292] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abfcb4c8-78dd-4f14-841b-c389082a454a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.928497] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3137e26-be20-43c5-87e8-f1b0ac682a3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.966045] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf2e648-3a28-4fed-8b7b-5112a2f9c9b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.973501] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf3470-f96e-4e40-b9dd-0512dce6d852 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.986972] env[69475]: DEBUG nova.compute.provider_tree [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.078857] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3deb8676-1382-4e90-ba2b-e80a0c5e5d97 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 840.213571] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508194, 'name': Rename_Task, 'duration_secs': 0.23806} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.214682] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.214967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-499a749b-f1fe-439d-8fbe-e5c05327c235 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.227118] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 840.227118] env[69475]: value = "task-3508195" [ 840.227118] env[69475]: _type = "Task" [ 840.227118] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.238925] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508195, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.494379] env[69475]: DEBUG nova.scheduler.client.report [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.502661] env[69475]: DEBUG nova.compute.manager [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-changed-30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.502851] env[69475]: DEBUG nova.compute.manager [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing instance network info cache due to event network-changed-30f10284-138a-4774-b024-33ffa906ef81. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.503102] env[69475]: DEBUG oslo_concurrency.lockutils [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.503244] env[69475]: DEBUG oslo_concurrency.lockutils [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.503403] env[69475]: DEBUG nova.network.neutron [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing network info cache for port 30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.608401] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 840.608756] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2bdd02e0-7216-49dc-85b5-303868449512 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.615716] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 840.615716] env[69475]: value = "task-3508196" [ 840.615716] env[69475]: _type = "Task" [ 840.615716] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.626263] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508196, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.703549] env[69475]: DEBUG nova.objects.instance [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'flavor' on Instance uuid baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 840.738376] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508195, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.005686] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.584s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.010034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.641s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.011748] env[69475]: INFO nova.compute.claims [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.125432] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508196, 'name': PowerOffVM_Task, 'duration_secs': 0.430545} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.125702] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 841.126592] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a86d7bac-4b50-4617-81b6-62975de7ba1b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.148597] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ded5ab8-ccf1-47e9-84d8-19b0337ce8b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.188864] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 841.189184] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb670abd-e1d8-443e-ad20-316adcdbfbeb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.195740] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 841.195740] env[69475]: value = "task-3508197" [ 841.195740] env[69475]: _type = "Task" [ 841.195740] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.203024] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508197, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.214556] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.214718] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.214894] env[69475]: DEBUG nova.network.neutron [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 841.215091] env[69475]: DEBUG nova.objects.instance [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'info_cache' on Instance uuid baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 841.238598] env[69475]: DEBUG oslo_vmware.api [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508195, 'name': PowerOnVM_Task, 'duration_secs': 0.695386} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.238870] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 841.239181] env[69475]: DEBUG nova.compute.manager [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 841.240047] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eafdd4a-4fe1-47f2-9a27-dcbc68d4d433 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.288461] env[69475]: DEBUG nova.network.neutron [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updated VIF entry in instance network info cache for port 30f10284-138a-4774-b024-33ffa906ef81. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.288856] env[69475]: DEBUG nova.network.neutron [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.516692] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "74f1d624-57c8-4ea8-96fc-4681b25b817b" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.517618] env[69475]: DEBUG oslo_concurrency.lockutils 
[None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "74f1d624-57c8-4ea8-96fc-4681b25b817b" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.706524] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 841.706761] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 841.707010] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 841.708520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 841.708520] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 841.708520] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a959d24b-12f7-462b-aae5-e8e2b8b9af00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.718376] env[69475]: DEBUG nova.objects.base [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 841.720086] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 841.720266] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 841.721378] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68ab2b1a-48f9-4041-9124-139f2320923a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.726523] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 841.726523] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b24a22-d886-b972-38b3-6bcc7f29a908" [ 841.726523] env[69475]: _type = "Task" [ 841.726523] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.735917] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b24a22-d886-b972-38b3-6bcc7f29a908, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.762702] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.792072] env[69475]: DEBUG oslo_concurrency.lockutils [req-04cb3f26-ef30-4d97-8207-95837ab50adc req-f455ba1c-fa27-476f-9ab0-4f1944b8ea30 service nova] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.023026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "74f1d624-57c8-4ea8-96fc-4681b25b817b" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.503s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 842.023026] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 842.240083] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b24a22-d886-b972-38b3-6bcc7f29a908, 'name': SearchDatastore_Task, 'duration_secs': 0.012795} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.240868] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3417a64c-8681-4275-b8d8-63b9f2371757 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.247983] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 842.247983] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd2186-79f3-00d8-de91-2807196da4fd" [ 842.247983] env[69475]: _type = "Task" [ 842.247983] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.257723] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd2186-79f3-00d8-de91-2807196da4fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.457372] env[69475]: DEBUG nova.network.neutron [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 842.519139] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1883b5a-e611-4919-8e1c-68a02498877f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.527303] env[69475]: DEBUG nova.compute.utils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 
842.528797] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 842.529116] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 842.532309] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a72bddb-6607-48f8-967e-d83660b4b5d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.569330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8019ea28-0326-4edb-adf8-c657ad7840f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.578624] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73ce4aa-58ef-455d-95d9-9bf1d998a42e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.583985] env[69475]: DEBUG nova.policy [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '90ece0be4039424d98ac26a5902d1462', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e9ef0d2164844746986555a4e808eee7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 842.597097] env[69475]: DEBUG nova.compute.provider_tree [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.759690] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dd2186-79f3-00d8-de91-2807196da4fd, 'name': SearchDatastore_Task, 'duration_secs': 0.032387} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.760131] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.760472] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. {{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 842.760798] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa9d9247-e6c8-4343-9bca-a4ab90686efb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.768217] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 842.768217] env[69475]: value = "task-3508198" [ 842.768217] env[69475]: _type = "Task" [ 842.768217] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.778443] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.961435] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 843.010217] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Successfully created port: 64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.038166] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 843.099896] env[69475]: DEBUG nova.scheduler.client.report [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.280607] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508198, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.348559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.348831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.349061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.349253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.349477] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.356613] env[69475]: INFO nova.compute.manager [None 
req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Terminating instance [ 843.609030] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.596s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.609030] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.611939] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 34.884s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.781011] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57722} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.781309] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
[ 843.782626] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc33b7f4-6868-4bd2-9227-eab584dd82bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.809600] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 843.809600] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b243ae6f-0b28-4bb1-a001-a60cceead065 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.828499] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 843.828499] env[69475]: value = "task-3508199" [ 843.828499] env[69475]: _type = "Task" [ 843.828499] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.840624] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508199, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.861587] env[69475]: DEBUG nova.compute.manager [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 843.861824] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 843.862868] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22217cd2-e6ad-434d-b79d-04a14bce0631 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.873199] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 843.873199] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae892fd7-994c-4631-aa95-f4c7ced902a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.877741] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 843.877741] env[69475]: value = "task-3508200" [ 843.877741] env[69475]: _type = "Task" [ 843.877741] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.887882] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508200, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.971032] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 843.971032] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-591f51b1-d988-4c3f-9441-644668e5f7c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.977430] env[69475]: DEBUG oslo_vmware.api [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 843.977430] env[69475]: value = "task-3508201" [ 843.977430] env[69475]: _type = "Task" [ 843.977430] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.986048] env[69475]: DEBUG oslo_vmware.api [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508201, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.049915] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 844.084044] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 844.084228] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 844.084290] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 844.084478] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 844.084900] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 844.085122] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 844.085350] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 844.085573] env[69475]: 
DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 844.085784] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 844.085972] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 844.086155] env[69475]: DEBUG nova.virt.hardware [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 844.087655] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681097df-5744-404f-8e7e-07fc481fc697 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.097912] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b5d9d8f-3d67-4954-84be-5c8880b3782c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.113278] env[69475]: DEBUG nova.compute.utils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.114518] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.114684] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.188598] env[69475]: DEBUG nova.policy [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1fc77aa810b24582ba0069952b28d1b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0f2c2f5187934f5da108a1c96a3a3125', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.338537] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508199, 'name': ReconfigVM_Task, 'duration_secs': 0.312174} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.341283] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 844.342075] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ef963f-9a83-4eb1-b3e1-335ca18b1a57 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.372059] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d75e02c-91b7-49c2-b82d-61c88599af2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.398431] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508200, 'name': PowerOffVM_Task, 'duration_secs': 0.264685} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.398431] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 844.398431] env[69475]: value = "task-3508202" [ 844.398431] env[69475]: _type = "Task" [ 844.398431] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.400715] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 844.400803] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 844.401265] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8793a9c-d841-4936-b800-4bd18c4c1aba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.417050] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508202, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.490938] env[69475]: DEBUG oslo_vmware.api [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508201, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.542518] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 844.542834] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 844.542949] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore2] 2b0cc71c-862e-4eb0-afc4-b2125003b087 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 844.543292] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-451ae814-7e72-4070-aef0-3c812603e948 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.549944] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 844.549944] env[69475]: value = "task-3508204" [ 844.549944] env[69475]: _type = "Task" [ 844.549944] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.559381] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508204, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.581211] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Successfully created port: 8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.621215] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.739903] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e44fbfd6-28d3-4c79-af16-d05415a3bf6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.753679] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a283b6-71f5-4f53-9721-a4971c8ad8c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.791450] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c60cc59-66dd-4843-bbad-53cfa6b415fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.795172] env[69475]: DEBUG nova.compute.manager [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Received event network-vif-plugged-64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.795720] env[69475]: DEBUG oslo_concurrency.lockutils [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] Acquiring lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.795720] env[69475]: DEBUG oslo_concurrency.lockutils [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.795833] env[69475]: DEBUG oslo_concurrency.lockutils [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.795994] env[69475]: DEBUG nova.compute.manager [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] No waiting events found dispatching network-vif-plugged-64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 844.796224] env[69475]: WARNING nova.compute.manager [req-afc91270-c1ba-4a37-a26a-e765d2e8bbd3 req-d39ce2c3-b3c3-48b7-8628-8e04aa6dbc14 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Received unexpected event network-vif-plugged-64fc6b26-9e1f-4a44-8001-126576f96e62 for instance with vm_state building and task_state spawning. [ 844.801988] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5999de-a4ce-405e-b441-371ff891f7ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.818286] env[69475]: DEBUG nova.compute.provider_tree [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 844.883490] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Successfully updated port: 64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 844.911621] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508202, 'name': ReconfigVM_Task, 'duration_secs': 0.156353} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.911621] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 844.911621] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-338847c1-02ac-4810-9643-ff0522dfd6bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.917279] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 844.917279] env[69475]: value = "task-3508205" [ 844.917279] env[69475]: _type = "Task" [ 844.917279] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.925996] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.988857] env[69475]: DEBUG oslo_vmware.api [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508201, 'name': PowerOnVM_Task, 'duration_secs': 0.672578} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.990330] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Successfully created port: 7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 844.992685] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 844.992900] env[69475]: DEBUG nova.compute.manager [None req-3ee41b31-f2b2-4df2-93ac-f4398053a188 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 844.993721] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ff6dd3-85a6-4560-a38f-6ed4344c0a8b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.062110] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508204, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.322198] env[69475]: DEBUG nova.scheduler.client.report [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 845.386598] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.387420] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquired lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.387722] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.427503] env[69475]: DEBUG oslo_vmware.api [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508205, 'name': PowerOnVM_Task, 'duration_secs': 0.398862} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.427868] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 845.433468] env[69475]: DEBUG nova.compute.manager [None req-a348bda8-d46c-4d7c-8a32-c624a3d36b83 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.434673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7bc6d5-add7-4522-9491-8a648f6a515c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.561398] env[69475]: DEBUG oslo_vmware.api [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508204, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.531999} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.564393] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 845.564393] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 845.564393] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 845.564393] env[69475]: INFO nova.compute.manager [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Took 1.70 seconds to destroy the instance on the hypervisor. [ 845.564393] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 845.564393] env[69475]: DEBUG nova.compute.manager [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.564393] env[69475]: DEBUG nova.network.neutron [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.630484] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.655317] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.655570] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 845.655727] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 845.655907] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 845.656351] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 845.656603] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:444}} [ 845.656916] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 845.657308] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 845.657545] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 845.657753] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 845.657966] env[69475]: DEBUG nova.virt.hardware [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 845.659348] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b6fa40-9e4e-4d17-9ba5-0ecd1815ddc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.671170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebaa719a-7acc-4334-9115-74b2b7903e00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.926228] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 846.089059] env[69475]: DEBUG nova.network.neutron [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Updating instance_info_cache with network_info: [{"id": "64fc6b26-9e1f-4a44-8001-126576f96e62", "address": "fa:16:3e:05:46:0f", "network": {"id": "40f077ba-e6c8-4075-921b-6563c070fe1a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1124489755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9ef0d2164844746986555a4e808eee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64fc6b26-9e", "ovs_interfaceid": "64fc6b26-9e1f-4a44-8001-126576f96e62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.335380] env[69475]: DEBUG nova.network.neutron [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.337729] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.726s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.340559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.913s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.342118] env[69475]: INFO nova.compute.claims [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 846.515499] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Successfully updated port: 8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 846.517245] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.518523] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.592674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Releasing lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.592995] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Instance network_info: |[{"id": "64fc6b26-9e1f-4a44-8001-126576f96e62", "address": "fa:16:3e:05:46:0f", "network": {"id": "40f077ba-e6c8-4075-921b-6563c070fe1a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1124489755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9ef0d2164844746986555a4e808eee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64fc6b26-9e", "ovs_interfaceid": "64fc6b26-9e1f-4a44-8001-126576f96e62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 846.593450] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:46:0f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '04ebd8af-aaf6-4d04-b869-3882e2571ed7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '64fc6b26-9e1f-4a44-8001-126576f96e62', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 846.603269] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a 
tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Creating folder: Project (e9ef0d2164844746986555a4e808eee7). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.603889] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f1cb453a-ee18-4a1c-b99f-15bc7f8ba59b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.614929] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Created folder: Project (e9ef0d2164844746986555a4e808eee7) in parent group-v700823. [ 846.615138] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Creating folder: Instances. Parent ref: group-v701003. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 846.615373] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce88a1ca-66c4-4975-9657-4d6acc3adb61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.623612] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Created folder: Instances in parent group-v701003. [ 846.623841] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 846.624036] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 846.624242] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-283602bb-e666-4d71-a632-c713e013ee04 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.642537] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 846.642537] env[69475]: value = "task-3508208" [ 846.642537] env[69475]: _type = "Task" [ 846.642537] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.650840] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508208, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.841699] env[69475]: INFO nova.compute.manager [-] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Took 1.28 seconds to deallocate network for instance. 
[ 846.913570] env[69475]: INFO nova.scheduler.client.report [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleted allocation for migration 7cecf250-4d16-45a0-ba21-6bbaf5ce2c0e [ 846.922448] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Received event network-changed-64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.922668] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Refreshing instance network info cache due to event network-changed-64fc6b26-9e1f-4a44-8001-126576f96e62. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 846.923098] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Acquiring lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.923368] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Acquired lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.923586] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Refreshing network info cache for port 64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.020495] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.154198] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508208, 'name': CreateVM_Task, 'duration_secs': 0.308985} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.154476] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.155484] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.155484] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.155590] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 847.155843] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8942710-abac-4031-a15b-3437ba046b96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.161218] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 847.161218] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527758a6-632b-8eb1-8e0f-ad2fd1050e33" [ 847.161218] env[69475]: _type = "Task" [ 847.161218] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.169638] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527758a6-632b-8eb1-8e0f-ad2fd1050e33, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.263136] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.263476] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.357593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.429022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f1735e00-1aef-41b2-ad6b-3eeab3c12b6d tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 42.260s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.548308] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.674908] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527758a6-632b-8eb1-8e0f-ad2fd1050e33, 'name': SearchDatastore_Task, 'duration_secs': 0.024015} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.678655] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.679034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 847.679414] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.679666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.679959] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 847.680598] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-376bb4b6-fccc-48b9-b91c-00253943d31a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.696065] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 847.696639] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 847.697047] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d32de81-baac-4021-b7e6-cc03ba60fa43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.702153] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 847.702153] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529d0460-1fe4-4d63-b43b-b449c677d25a" [ 847.702153] env[69475]: _type = "Task" [ 847.702153] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.711379] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529d0460-1fe4-4d63-b43b-b449c677d25a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.721342] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Updated VIF entry in instance network info cache for port 64fc6b26-9e1f-4a44-8001-126576f96e62. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.721676] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Updating instance_info_cache with network_info: [{"id": "64fc6b26-9e1f-4a44-8001-126576f96e62", "address": "fa:16:3e:05:46:0f", "network": {"id": "40f077ba-e6c8-4075-921b-6563c070fe1a", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1124489755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e9ef0d2164844746986555a4e808eee7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "04ebd8af-aaf6-4d04-b869-3882e2571ed7", "external-id": "nsx-vlan-transportzone-541", "segmentation_id": 541, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap64fc6b26-9e", "ovs_interfaceid": "64fc6b26-9e1f-4a44-8001-126576f96e62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.766597] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 847.926286] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5de8cf-86f9-4dc4-a897-e185a9c75285 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.934436] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3cb06e-9c62-4346-a0ae-471e2da11e47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.968019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91bf8b8a-be9d-4070-b067-2c1b74335fe3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.977967] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9782ecbc-2e42-4308-b28c-d4674054799a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.992010] env[69475]: DEBUG nova.compute.provider_tree [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.213123] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529d0460-1fe4-4d63-b43b-b449c677d25a, 'name': SearchDatastore_Task, 'duration_secs': 0.041576} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.213918] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5009fee3-de1e-424f-91be-02271561bb35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.219957] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 848.219957] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5216e35d-4b0c-790a-b3d0-e3d116dd5886" [ 848.219957] env[69475]: _type = "Task" [ 848.219957] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.228286] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Releasing lock "refresh_cache-56f0e59a-1c37-4977-81dc-da1a274ce7e7" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.228476] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Received event network-vif-deleted-05ec4e25-3c6d-4d4b-a353-a749c7ee1242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.228661] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-vif-plugged-8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.228844] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Acquiring lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.229055] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.229223] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.229389] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] No waiting events found dispatching network-vif-plugged-8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.229554] env[69475]: WARNING nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received unexpected event network-vif-plugged-8bfd67d5-a8aa-4af9-bef4-2010baea67f2 for instance with vm_state building and task_state spawning. 
[ 848.229739] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-changed-8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.229869] env[69475]: DEBUG nova.compute.manager [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Refreshing instance network info cache due to event network-changed-8bfd67d5-a8aa-4af9-bef4-2010baea67f2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 848.230089] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Acquiring lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.230273] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Acquired lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 848.230519] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Refreshing network info cache for port 8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 848.231610] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5216e35d-4b0c-790a-b3d0-e3d116dd5886, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.288622] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.495854] env[69475]: DEBUG nova.scheduler.client.report [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 848.729320] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5216e35d-4b0c-790a-b3d0-e3d116dd5886, 'name': SearchDatastore_Task, 'duration_secs': 0.010714} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.729632] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.729898] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 56f0e59a-1c37-4977-81dc-da1a274ce7e7/56f0e59a-1c37-4977-81dc-da1a274ce7e7.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 848.730168] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88caf4e0-1d59-4889-8991-f9791efc56a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.736368] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 848.736368] env[69475]: value = "task-3508209" [ 848.736368] env[69475]: _type = "Task" [ 848.736368] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.743555] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508209, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.774101] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Successfully updated port: 7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 848.779541] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 848.867111] env[69475]: DEBUG nova.network.neutron [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.007347] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.007892] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 849.012709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.609s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.012709] env[69475]: DEBUG nova.objects.instance [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lazy-loading 'resources' on Instance uuid 712e93b6-e797-4b9f-b39b-33373cede403 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 849.016601] env[69475]: DEBUG nova.compute.manager [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-vif-plugged-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.016601] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Acquiring lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.016601] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.016601] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.016796] env[69475]: DEBUG nova.compute.manager [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] No waiting events found dispatching network-vif-plugged-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 849.016824] env[69475]: WARNING nova.compute.manager [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received unexpected event network-vif-plugged-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 for instance with vm_state building and task_state spawning. 
[ 849.017215] env[69475]: DEBUG nova.compute.manager [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-changed-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.017215] env[69475]: DEBUG nova.compute.manager [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Refreshing instance network info cache due to event network-changed-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 849.017736] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Acquiring lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.165689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.165955] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.246796] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508209, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480851} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.247116] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 56f0e59a-1c37-4977-81dc-da1a274ce7e7/56f0e59a-1c37-4977-81dc-da1a274ce7e7.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.247309] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.247583] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27adbb98-b259-4818-b519-6362a89d7365 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.254210] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 849.254210] env[69475]: value = "task-3508210" [ 849.254210] env[69475]: _type = "Task" [ 849.254210] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.261975] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508210, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.276751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.369672] env[69475]: DEBUG oslo_concurrency.lockutils [req-bc639554-763b-427e-b33b-6f240db37388 req-ece981f0-52c0-4ad2-92e1-bab3391b41d2 service nova] Releasing lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.370322] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Acquired lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.370430] env[69475]: DEBUG nova.network.neutron [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Refreshing network info cache for port 7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.513880] env[69475]: DEBUG nova.compute.utils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 849.515330] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 849.515506] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 849.555267] env[69475]: DEBUG nova.policy [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9afaaccb2ad4d2391e4624d54143816', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '35487483c5554eddbe1994b81ca45a13', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 849.669376] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 849.768205] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066354} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.768659] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 849.769200] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2460cc59-4ec6-48b3-b73c-a0f029cf1a64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.796981] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 56f0e59a-1c37-4977-81dc-da1a274ce7e7/56f0e59a-1c37-4977-81dc-da1a274ce7e7.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 849.800986] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c5f7748-af4b-4409-907d-d6f62d7aa204 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.819913] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 849.819913] env[69475]: value = "task-3508211" [ 849.819913] env[69475]: _type = "Task" [ 849.819913] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.831076] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.836585] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Successfully created port: b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.917202] env[69475]: DEBUG nova.network.neutron [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.018918] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 850.039204] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073f4ed8-1b16-48b3-9011-54b8faa1acd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.043549] env[69475]: DEBUG nova.network.neutron [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.049461] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fed448b-89b6-4ae9-9e76-11096bddd385 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.081464] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982f30eb-089d-4bb9-a3d5-0a7ba644c811 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.089378] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c75c272a-4c7b-4a40-8b37-27d74f5063ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.103706] env[69475]: DEBUG nova.compute.provider_tree [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.189898] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.330175] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508211, 'name': ReconfigVM_Task, 'duration_secs': 0.254116} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.330503] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 56f0e59a-1c37-4977-81dc-da1a274ce7e7/56f0e59a-1c37-4977-81dc-da1a274ce7e7.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.331123] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d926c551-b3b6-4403-8b0c-1de994bb2bc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.337160] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 850.337160] env[69475]: value = "task-3508212" [ 850.337160] env[69475]: _type = "Task" [ 850.337160] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.344862] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508212, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.547727] env[69475]: DEBUG oslo_concurrency.lockutils [req-985a7403-b4c1-4f4c-bc4a-293d85e369ea req-a0e78405-dd6f-4612-98ee-82e1e1349156 service nova] Releasing lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.548215] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.548385] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.607227] env[69475]: DEBUG nova.scheduler.client.report [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 850.849706] env[69475]: DEBUG 
oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508212, 'name': Rename_Task, 'duration_secs': 0.134377} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.849878] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 850.850062] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76a8a51f-a1a9-4dba-a858-850aff31590f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.856302] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 850.856302] env[69475]: value = "task-3508213" [ 850.856302] env[69475]: _type = "Task" [ 850.856302] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.864135] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508213, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.028403] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 851.055661] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.055982] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 851.056200] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 851.056483] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 851.056665] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 851.056886] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 851.057167] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 851.057362] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 851.057574] env[69475]: DEBUG nova.virt.hardware [None 
req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 851.057760] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 851.057941] env[69475]: DEBUG nova.virt.hardware [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 851.059033] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8687862e-978c-4a3e-b823-b2eb830747d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.067178] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95c2be5-a76c-4947-962c-42ae9b84a8a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.087586] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 851.112157] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.114321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.285s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.114551] env[69475]: DEBUG nova.objects.instance [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lazy-loading 'resources' on Instance uuid daef2117-0d9f-4c9e-99e7-1e8a65aa1f22 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 851.140279] env[69475]: INFO nova.scheduler.client.report [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted allocations for instance 712e93b6-e797-4b9f-b39b-33373cede403 [ 851.365701] env[69475]: DEBUG oslo_vmware.api [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508213, 'name': PowerOnVM_Task, 'duration_secs': 0.419123} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.365996] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 851.366168] env[69475]: INFO nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Took 7.32 seconds to spawn the instance on the hypervisor. 
[ 851.366344] env[69475]: DEBUG nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 851.367115] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37eaff6d-c094-40a9-abb4-d53eca24218c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.447921] env[69475]: DEBUG nova.compute.manager [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Received event network-vif-plugged-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 851.448231] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] Acquiring lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 851.448366] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] Lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 851.448531] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] Lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.448704] env[69475]: DEBUG nova.compute.manager [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] No waiting events found dispatching network-vif-plugged-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 851.448868] env[69475]: WARNING nova.compute.manager [req-4e19c7ed-2ef7-4da5-b9a2-49002cff8ef2 req-7310f2ec-b91d-4cfb-b63c-96d82023a686 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Received unexpected event network-vif-plugged-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f for instance with vm_state building and task_state spawning. 
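A minimal sketch of the oslo.vmware call pattern these task entries reflect (the Rename_Task and PowerOnVM_Task requests are submitted through the session and then polled by wait_for_task(), which produces the "progress is 0%" and "completed successfully" lines above). This is illustrative only: the helper name power_on_and_wait and the vm_ref argument are placeholders, not identifiers taken from this log, and constructing the VMwareAPISession is out of scope here.

    # Illustrative sketch, assuming an already-created oslo_vmware.api.VMwareAPISession.
    def power_on_and_wait(session, vm_ref):
        """Submit PowerOnVM_Task for vm_ref and poll it to completion."""
        # invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOnVM_Task"
        # entries); wait_for_task() is the polling loop that logs task progress until
        # vCenter reports the task as completed.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)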
[ 851.477415] env[69475]: DEBUG nova.network.neutron [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Updating instance_info_cache with network_info: [{"id": "8bfd67d5-a8aa-4af9-bef4-2010baea67f2", "address": "fa:16:3e:bd:63:e6", "network": {"id": "4b76b2a6-9ccd-4b60-94cd-94c994c0211a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279081688", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bfd67d5-a8", "ovs_interfaceid": "8bfd67d5-a8aa-4af9-bef4-2010baea67f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "address": "fa:16:3e:c0:67:b9", "network": {"id": "ebe0edf3-abeb-423f-9f16-0f0ced2abe33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1504605295", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0a7b95-70", "ovs_interfaceid": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.542430] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Successfully updated port: b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.653282] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1288af92-cf38-455c-8d36-253a44de3c0b tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "712e93b6-e797-4b9f-b39b-33373cede403" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.746s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 851.886306] env[69475]: INFO nova.compute.manager [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Took 49.42 seconds to build instance. [ 851.979879] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "refresh_cache-2ade2ed6-4725-4913-8ac4-14a96ced3e4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.980064] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance network_info: |[{"id": "8bfd67d5-a8aa-4af9-bef4-2010baea67f2", "address": "fa:16:3e:bd:63:e6", "network": {"id": "4b76b2a6-9ccd-4b60-94cd-94c994c0211a", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1279081688", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.210", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6076d24d-3c8e-4bbb-ba96-a08fb27a73cc", "external-id": "nsx-vlan-transportzone-267", "segmentation_id": 267, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bfd67d5-a8", "ovs_interfaceid": "8bfd67d5-a8aa-4af9-bef4-2010baea67f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "address": "fa:16:3e:c0:67:b9", "network": {"id": "ebe0edf3-abeb-423f-9f16-0f0ced2abe33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1504605295", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0a7b95-70", "ovs_interfaceid": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 851.980491] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e 
tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:63:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6076d24d-3c8e-4bbb-ba96-a08fb27a73cc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bfd67d5-a8aa-4af9-bef4-2010baea67f2', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:67:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '233536d0-6913-4879-8442-42dcf1d4ecbb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c0a7b95-70af-4d8c-a572-06b7aa8b7a34', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.990354] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 851.992714] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.993145] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cbd4486c-ec44-48b2-9eb7-057caa3fab37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.018425] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 852.018425] env[69475]: value = "task-3508214" [ 852.018425] env[69475]: _type = "Task" [ 852.018425] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.026966] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508214, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.042564] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.042749] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquired lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.042923] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.107051] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2ac84d-2154-4795-b770-e7f1b374150e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.115574] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da9994e8-c404-4b7d-b586-c13c81dbde43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.145700] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4391f042-ce04-44e5-b95b-7a1dfda18176 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.154090] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ba1a02-5297-49a7-92a5-d2f20b5c05b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.167668] env[69475]: DEBUG nova.compute.provider_tree [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.388237] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46199b42-26e0-48a8-8ebd-3173138b336a tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.442s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.442967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.442967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.442967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.443154] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.443308] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.446047] env[69475]: INFO nova.compute.manager [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Terminating instance [ 852.529954] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508214, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.578040] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 852.671408] env[69475]: DEBUG nova.scheduler.client.report [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 852.736467] env[69475]: DEBUG nova.network.neutron [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Updating instance_info_cache with network_info: [{"id": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "address": "fa:16:3e:37:8a:a0", "network": {"id": "1386fa93-685c-4527-93b5-b1bceacc5694", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-163296686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35487483c5554eddbe1994b81ca45a13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a2d38d-fa", "ovs_interfaceid": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.949866] env[69475]: DEBUG nova.compute.manager [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 852.950223] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 852.951133] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f3e946-d631-4361-8fb3-a1a4d1e5f4af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.958806] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 852.960019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ec115e32-51ff-40e8-94b6-ee24b19dc6ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.965362] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 852.965362] env[69475]: value = "task-3508215" [ 852.965362] env[69475]: _type = "Task" [ 852.965362] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.973155] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508215, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.028351] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508214, 'name': CreateVM_Task, 'duration_secs': 0.906063} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.029024] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.029411] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.029606] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.029952] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 853.030263] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e35761b2-1b5b-4e1a-90b3-eaaa5c1e7d9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.035176] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 853.035176] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c31a84-1fa4-4add-fcc4-465aa38b87cc" [ 853.035176] env[69475]: _type = "Task" [ 853.035176] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.043325] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c31a84-1fa4-4add-fcc4-465aa38b87cc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.178797] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.064s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.182311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 41.078s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 853.202522] env[69475]: INFO nova.scheduler.client.report [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Deleted allocations for instance daef2117-0d9f-4c9e-99e7-1e8a65aa1f22 [ 853.242736] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Releasing lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.242736] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Instance network_info: |[{"id": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "address": "fa:16:3e:37:8a:a0", "network": {"id": "1386fa93-685c-4527-93b5-b1bceacc5694", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-163296686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35487483c5554eddbe1994b81ca45a13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a2d38d-fa", "ovs_interfaceid": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 853.244016] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:8a:a0', 'network_ref': {'type': 'OpaqueNetwork', 
'network-id': '5c8dbe25-bca7-4d91-b577-193b8b2aad8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.250595] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Creating folder: Project (35487483c5554eddbe1994b81ca45a13). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.251568] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a80e4632-704c-479f-bc38-f8784d1ec3d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.263365] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Created folder: Project (35487483c5554eddbe1994b81ca45a13) in parent group-v700823. [ 853.263553] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Creating folder: Instances. Parent ref: group-v701007. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.263777] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fe38ae58-1cf6-4c4c-a841-064cb744f99b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.272998] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Created folder: Instances in parent group-v701007. [ 853.273260] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 853.273451] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.273646] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63a9a3b5-576f-485f-8fad-357edd5124c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.292503] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.292503] env[69475]: value = "task-3508218" [ 853.292503] env[69475]: _type = "Task" [ 853.292503] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.300308] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508218, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.476297] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508215, 'name': PowerOffVM_Task, 'duration_secs': 0.203642} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.476605] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 853.476807] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 853.477077] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a1fcc3c-1c8e-4b73-a12d-96c1c9ae3c8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.500259] env[69475]: DEBUG nova.compute.manager [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Received event network-changed-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 853.500500] env[69475]: DEBUG nova.compute.manager [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Refreshing instance network info cache due to event network-changed-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 853.500882] env[69475]: DEBUG oslo_concurrency.lockutils [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] Acquiring lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.501080] env[69475]: DEBUG oslo_concurrency.lockutils [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] Acquired lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.501283] env[69475]: DEBUG nova.network.neutron [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Refreshing network info cache for port b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.540480] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 853.540763] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 853.540870] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Deleting the datastore file [datastore1] 56f0e59a-1c37-4977-81dc-da1a274ce7e7 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.541492] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-441d672f-ff36-4ec0-a4f8-dc3239ab8677 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.546807] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c31a84-1fa4-4add-fcc4-465aa38b87cc, 'name': SearchDatastore_Task, 'duration_secs': 0.011899} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.547403] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.547632] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.547861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.548012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.548214] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.548472] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-623ad4cd-9f93-45ec-bf46-f3cb54b1e24a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.551257] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for the task: (returnval){ [ 853.551257] env[69475]: value = "task-3508220" [ 853.551257] env[69475]: _type = "Task" [ 853.551257] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.556351] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.556554] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.559997] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-458ae3f0-a327-4796-9f2f-bf1813124f87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.562282] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508220, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.565063] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 853.565063] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5212401d-8c83-bab0-7c67-da3645929f36" [ 853.565063] env[69475]: _type = "Task" [ 853.565063] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.573348] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5212401d-8c83-bab0-7c67-da3645929f36, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.686866] env[69475]: INFO nova.compute.claims [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.710837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aa358ac0-59e3-42fd-ae50-d4bd39f6e1ee tempest-ListServersNegativeTestJSON-1197111871 tempest-ListServersNegativeTestJSON-1197111871-project-member] Lock "daef2117-0d9f-4c9e-99e7-1e8a65aa1f22" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.616s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 853.802866] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508218, 'name': CreateVM_Task, 'duration_secs': 0.436083} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.802984] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 853.803693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.803862] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.804211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 853.804472] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f51f671-a141-46ef-87a0-ed5defc89673 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.808936] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 853.808936] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214471d-e540-53ba-04df-4b9ef62f25c7" [ 853.808936] env[69475]: _type = "Task" [ 853.808936] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.816228] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214471d-e540-53ba-04df-4b9ef62f25c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.063177] env[69475]: DEBUG oslo_vmware.api [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Task: {'id': task-3508220, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128977} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.063520] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.063575] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 854.063755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 854.063957] env[69475]: INFO nova.compute.manager [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Took 1.11 seconds to destroy the instance on the hypervisor. [ 854.064180] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.065291] env[69475]: DEBUG nova.compute.manager [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 854.065374] env[69475]: DEBUG nova.network.neutron [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 854.075367] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5212401d-8c83-bab0-7c67-da3645929f36, 'name': SearchDatastore_Task, 'duration_secs': 0.007685} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.076121] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d1b6179-df08-4dca-8313-a8d512624cb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.081057] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 854.081057] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a04f0f-5fff-db84-4eba-8c579b175281" [ 854.081057] env[69475]: _type = "Task" [ 854.081057] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.088458] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a04f0f-5fff-db84-4eba-8c579b175281, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.193209] env[69475]: INFO nova.compute.resource_tracker [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating resource usage from migration 7e88bc9f-10f3-40da-8081-c14e8c051ac4 [ 854.309221] env[69475]: DEBUG nova.network.neutron [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Updated VIF entry in instance network info cache for port b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.309583] env[69475]: DEBUG nova.network.neutron [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Updating instance_info_cache with network_info: [{"id": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "address": "fa:16:3e:37:8a:a0", "network": {"id": "1386fa93-685c-4527-93b5-b1bceacc5694", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-163296686-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "35487483c5554eddbe1994b81ca45a13", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5c8dbe25-bca7-4d91-b577-193b8b2aad8d", "external-id": "nsx-vlan-transportzone-643", "segmentation_id": 643, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb4a2d38d-fa", "ovs_interfaceid": "b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.320130] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214471d-e540-53ba-04df-4b9ef62f25c7, 'name': SearchDatastore_Task, 'duration_secs': 0.016248} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.320424] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.320637] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 854.320837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.595636] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a04f0f-5fff-db84-4eba-8c579b175281, 'name': SearchDatastore_Task, 'duration_secs': 0.008807} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.598416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.598734] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2ade2ed6-4725-4913-8ac4-14a96ced3e4b/2ade2ed6-4725-4913-8ac4-14a96ced3e4b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.599263] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.599517] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.599823] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-238d082d-b354-49e8-80e2-6379c8709118 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.603477] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32628d30-038d-4b12-acdd-cedfecc7bc80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.614654] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.614874] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 854.615742] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 854.615742] env[69475]: value = "task-3508221" [ 854.615742] env[69475]: _type = "Task" [ 854.615742] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.615944] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc31d119-4723-4606-a38c-1d9180e73bd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.627018] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 854.627018] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52965ffa-3ef3-197e-046d-84e4e2d8a3c6" [ 854.627018] env[69475]: _type = "Task" [ 854.627018] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.631751] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.644305] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52965ffa-3ef3-197e-046d-84e4e2d8a3c6, 'name': SearchDatastore_Task, 'duration_secs': 0.008659} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.644305] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dbc2013-d9c5-4b4b-80cf-a5d99c3687d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.646580] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 854.646580] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5239b142-7d51-b7ca-8b64-347c50a6bda0" [ 854.646580] env[69475]: _type = "Task" [ 854.646580] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.657300] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5239b142-7d51-b7ca-8b64-347c50a6bda0, 'name': SearchDatastore_Task, 'duration_secs': 0.007583} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.657568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.657831] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 619a87e7-097c-41af-8452-5437b82e7ebe/619a87e7-097c-41af-8452-5437b82e7ebe.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.658108] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-605e32a0-ce2d-4094-a09a-5e9e7e0c74d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.666910] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 854.666910] env[69475]: value = "task-3508222" [ 854.666910] env[69475]: _type = "Task" [ 854.666910] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.672777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32acbf43-29fb-4072-b450-caec26ae9fcb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.678472] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508222, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.682973] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3faac5fa-5511-4c85-9180-04243aad56a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.714616] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a6ea8c-51eb-41e8-958f-ef9a1abe5fa3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.722090] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35aab94-4b58-4940-8e87-ec22939d6ddb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.735381] env[69475]: DEBUG nova.compute.provider_tree [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.816052] env[69475]: DEBUG oslo_concurrency.lockutils [req-fa70da92-4f8d-482e-97fb-64017e4d46e3 req-3882fac6-85bc-4c50-b100-bf9798c28201 service nova] Releasing lock "refresh_cache-619a87e7-097c-41af-8452-5437b82e7ebe" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.928946] env[69475]: DEBUG nova.network.neutron [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.129420] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514829} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.129773] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2ade2ed6-4725-4913-8ac4-14a96ced3e4b/2ade2ed6-4725-4913-8ac4-14a96ced3e4b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.129889] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.130159] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f9a3eef-c07e-4178-be84-eb8c0b626b10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.136060] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 855.136060] env[69475]: value = "task-3508223" [ 855.136060] env[69475]: _type = "Task" [ 855.136060] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.155539] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508223, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.178745] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508222, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.239124] env[69475]: DEBUG nova.scheduler.client.report [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 855.432359] env[69475]: INFO nova.compute.manager [-] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Took 1.37 seconds to deallocate network for instance. 
[ 855.602701] env[69475]: DEBUG nova.compute.manager [req-31e647bd-1dfa-44ef-a3b6-773fb0945191 req-b53def43-572b-496a-b75e-f452e6250fc0 service nova] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Received event network-vif-deleted-64fc6b26-9e1f-4a44-8001-126576f96e62 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 855.646611] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508223, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081226} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.647185] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.649014] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e01fdcf-dfe5-43fc-a0fa-d4537b519ff2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.679817] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 2ade2ed6-4725-4913-8ac4-14a96ced3e4b/2ade2ed6-4725-4913-8ac4-14a96ced3e4b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.679817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a2be570-02b8-4152-bc43-f15b19b9ebaf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.705271] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508222, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.76989} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.706275] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 619a87e7-097c-41af-8452-5437b82e7ebe/619a87e7-097c-41af-8452-5437b82e7ebe.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.706524] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.706776] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 855.706776] env[69475]: value = "task-3508224" [ 855.706776] env[69475]: _type = "Task" [ 855.706776] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.707244] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1caf12f-b8af-4e79-999d-241e16bab676 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.716278] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 855.716278] env[69475]: value = "task-3508225" [ 855.716278] env[69475]: _type = "Task" [ 855.716278] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.720252] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508224, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.727014] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.746901] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.565s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.747128] env[69475]: INFO nova.compute.manager [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Migrating [ 855.754509] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.010s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.754781] env[69475]: DEBUG nova.objects.instance [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lazy-loading 'resources' on Instance uuid 2e7066ca-162e-4465-a9c1-5422510e4c0f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.940812] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.219446] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508224, 'name': ReconfigVM_Task, 'duration_secs': 0.302008} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.219810] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 2ade2ed6-4725-4913-8ac4-14a96ced3e4b/2ade2ed6-4725-4913-8ac4-14a96ced3e4b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.223394] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d982af08-a69a-400a-8d54-9093cdb3fdbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.231583] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508225, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.232860] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 856.232860] env[69475]: value = "task-3508226" [ 856.232860] env[69475]: _type = "Task" [ 856.232860] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.241124] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508226, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.264609] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.264834] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.265166] env[69475]: DEBUG nova.network.neutron [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.702601] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e82e65-9c69-476d-87c5-1cb5ff9edad9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.710167] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e284d90d-e716-4b0f-9c96-18426dde6995 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.748270] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46569fd-dcec-4965-a47e-05f2925cf513 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.755102] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508225, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.778709} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.759793] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 856.760128] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508226, 'name': Rename_Task, 'duration_secs': 0.139748} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.760845] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ab5b59-04cb-4911-b971-262a1407beb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.763282] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 856.764489] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d75a2be3-5353-46c5-a0fa-758a90493c60 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.768439] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9610605c-fb60-431a-bfc1-0bd31c4ba0ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.781595] env[69475]: DEBUG nova.compute.provider_tree [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.802171] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] 619a87e7-097c-41af-8452-5437b82e7ebe/619a87e7-097c-41af-8452-5437b82e7ebe.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 856.804053] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 856.804053] env[69475]: value = "task-3508227" [ 856.804053] env[69475]: _type = "Task" [ 856.804053] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.804252] env[69475]: DEBUG nova.scheduler.client.report [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 856.807666] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6eaba7bc-c780-439f-8a7d-b842513b9a2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.824336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.070s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.831060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.492s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.832671] env[69475]: INFO nova.compute.claims [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.841850] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508227, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.843480] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 856.843480] env[69475]: value = "task-3508228" [ 856.843480] env[69475]: _type = "Task" [ 856.843480] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.851963] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508228, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.856156] env[69475]: INFO nova.scheduler.client.report [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleted allocations for instance 2e7066ca-162e-4465-a9c1-5422510e4c0f [ 857.131365] env[69475]: DEBUG nova.network.neutron [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.333378] env[69475]: DEBUG oslo_vmware.api [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508227, 'name': PowerOnVM_Task, 'duration_secs': 0.498619} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.333681] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 857.333913] env[69475]: INFO nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Took 11.70 seconds to spawn the instance on the hypervisor. 
[ 857.334164] env[69475]: DEBUG nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 857.334969] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83228308-64f0-4cf0-9bce-5875a6db506c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.354147] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508228, 'name': ReconfigVM_Task, 'duration_secs': 0.376343} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.354369] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Reconfigured VM instance instance-0000003f to attach disk [datastore1] 619a87e7-097c-41af-8452-5437b82e7ebe/619a87e7-097c-41af-8452-5437b82e7ebe.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.354945] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8f90e71d-6703-4eef-95d4-95924a1fa9e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.362842] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 857.362842] env[69475]: value = "task-3508229" [ 857.362842] env[69475]: _type = "Task" [ 857.362842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.365746] env[69475]: DEBUG oslo_concurrency.lockutils [None req-228cf427-7dce-4c95-a4ff-089fbe598289 tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "2e7066ca-162e-4465-a9c1-5422510e4c0f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.612s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.372461] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508229, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.633616] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.856447] env[69475]: INFO nova.compute.manager [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Took 50.51 seconds to build instance. [ 857.872771] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508229, 'name': Rename_Task, 'duration_secs': 0.266028} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.872771] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.872974] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be185485-f2bc-419e-a091-8331ad266290 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.881252] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 857.881252] env[69475]: value = "task-3508230" [ 857.881252] env[69475]: _type = "Task" [ 857.881252] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.889812] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508230, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.330738] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336270aa-7901-4b0a-aae6-bc84f7df76f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.343014] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa5552da-d976-4e46-9e51-e53c5d4d4499 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.374726] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d5c12d65-63f2-4a7c-a860-af72a5d4b20e tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.500s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.374726] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee537202-929e-4e0a-8898-3e23e873f211 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.381072] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feea015a-384c-49c8-b06b-ba8b74d0a867 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.400633] env[69475]: DEBUG nova.compute.provider_tree [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.407285] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508230, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.895968] env[69475]: DEBUG oslo_vmware.api [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508230, 'name': PowerOnVM_Task, 'duration_secs': 0.665253} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.896250] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 858.896302] env[69475]: INFO nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Took 7.87 seconds to spawn the instance on the hypervisor. 
[ 858.896479] env[69475]: DEBUG nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 858.897508] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76db04f3-e868-4bf4-839b-0cc30e7ad4a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.908797] env[69475]: DEBUG nova.scheduler.client.report [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 858.961870] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.961965] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.962216] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 858.962416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 858.962589] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.965248] env[69475]: INFO nova.compute.manager [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Terminating instance [ 859.156135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70ddd9c-8029-4938-84a0-94bb58551050 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.176902] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 859.419808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.587s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.419974] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 859.426295] env[69475]: INFO nova.compute.manager [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Took 50.02 seconds to build instance. [ 859.426707] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.485s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.428319] env[69475]: INFO nova.compute.claims [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 859.472022] env[69475]: DEBUG nova.compute.manager [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 859.472022] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 859.472022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d42b2f41-5d61-455f-b1fa-0afda99efacd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.481526] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.481924] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a447ebc0-698f-4881-92d6-6bb25d1e5a8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.487715] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 859.487715] env[69475]: value = "task-3508232" [ 859.487715] env[69475]: _type = "Task" [ 859.487715] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.496526] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.532984] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "0a65565c-c679-47e5-8606-832fe3876af6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.533328] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.684936] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.685276] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85783376-753d-4c75-979a-63a3992fe2fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.697504] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 859.697504] env[69475]: value = "task-3508233" [ 859.697504] env[69475]: _type = "Task" [ 859.697504] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.706971] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508233, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.925958] env[69475]: DEBUG nova.compute.utils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 859.927414] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 859.931065] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 859.933423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0de63c10-ceb0-4dbc-b918-91a970dd03b2 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 77.074s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.972133] env[69475]: DEBUG nova.policy [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2570dc62118b4091b27c1e64f4752558', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '947a74cfc69b45dbb3aa09060c5b76f4', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 860.001339] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508232, 'name': PowerOffVM_Task, 'duration_secs': 0.28754} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.001600] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.001765] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 860.002050] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad0dfc5b-25f8-443a-88a3-6e0e2a7a80f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.036665] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.106816] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 860.106912] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 860.107062] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleting the datastore file [datastore1] 2ade2ed6-4725-4913-8ac4-14a96ced3e4b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 860.107322] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a8ddcb8b-1c84-46f8-a9f4-f1dbc88e172c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.113338] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for the task: (returnval){ [ 860.113338] env[69475]: value = "task-3508235" [ 860.113338] env[69475]: _type = "Task" [ 860.113338] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.121866] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.209286] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508233, 'name': PowerOffVM_Task, 'duration_secs': 0.252787} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.209624] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 860.209821] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 860.295072] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Successfully created port: 9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.431696] env[69475]: DEBUG nova.compute.utils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 860.559229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.627699] env[69475]: DEBUG oslo_vmware.api [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Task: {'id': task-3508235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159801} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.628139] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 860.628231] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 860.628406] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 860.628576] env[69475]: INFO nova.compute.manager [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 860.628815] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 860.628998] env[69475]: DEBUG nova.compute.manager [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 860.629109] env[69475]: DEBUG nova.network.neutron [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 860.719090] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.719385] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 
860.719612] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 860.719825] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 860.720012] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 860.720852] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 860.720852] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 860.720852] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 860.720852] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 860.721053] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 860.721164] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 860.729503] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-056581ce-ac00-491b-a3ca-e5d64bddfad9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.748635] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: 
(returnval){ [ 860.748635] env[69475]: value = "task-3508236" [ 860.748635] env[69475]: _type = "Task" [ 860.748635] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.761027] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508236, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.938021] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 860.949800] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.950042] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.950264] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "93607154-f135-4925-9c3a-a97051535b00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.950441] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.950602] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.953167] env[69475]: INFO nova.compute.manager [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] 
[instance: 93607154-f135-4925-9c3a-a97051535b00] Terminating instance [ 860.982480] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40d73cb4-9bad-4c0c-bd7c-7924f66e30e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.991068] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7955fc64-552d-4e4c-beec-ce52d70051e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.024259] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc70d84-7970-4dde-a470-7e504432556f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.027809] env[69475]: DEBUG nova.compute.manager [req-48162cac-a637-45e1-933d-1480263e783c req-1d1dc009-09f8-4ec8-b4e1-3621e25c92c8 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-vif-deleted-8bfd67d5-a8aa-4af9-bef4-2010baea67f2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 861.027989] env[69475]: INFO nova.compute.manager [req-48162cac-a637-45e1-933d-1480263e783c req-1d1dc009-09f8-4ec8-b4e1-3621e25c92c8 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Neutron deleted interface 8bfd67d5-a8aa-4af9-bef4-2010baea67f2; detaching it from the instance and deleting it from the info cache [ 861.028249] env[69475]: DEBUG nova.network.neutron [req-48162cac-a637-45e1-933d-1480263e783c req-1d1dc009-09f8-4ec8-b4e1-3621e25c92c8 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Updating instance_info_cache with network_info: [{"id": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "address": "fa:16:3e:c0:67:b9", "network": {"id": "ebe0edf3-abeb-423f-9f16-0f0ced2abe33", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1504605295", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.177", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "0f2c2f5187934f5da108a1c96a3a3125", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "233536d0-6913-4879-8442-42dcf1d4ecbb", "external-id": "nsx-vlan-transportzone-700", "segmentation_id": 700, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c0a7b95-70", "ovs_interfaceid": "7c0a7b95-70af-4d8c-a572-06b7aa8b7a34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.036368] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2c21d5-647e-4ea3-a7b8-6e9486fd6d79 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.050342] env[69475]: DEBUG nova.compute.provider_tree [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.260349] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508236, 'name': ReconfigVM_Task, 'duration_secs': 0.189738} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.260625] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 861.459719] env[69475]: DEBUG nova.compute.manager [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 861.459895] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.460850] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ccc159c-d5f6-4b10-ac4d-5960d741408b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.469708] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.470014] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed31b748-50ed-4348-a907-1fd2d56c0fa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.477524] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 861.477524] env[69475]: value = "task-3508237" [ 861.477524] env[69475]: _type = "Task" [ 861.477524] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.488445] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508237, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.531805] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a589b674-2ce1-426b-82fd-7db10ca20b24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.542087] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efa48c9-c5d6-4398-9070-16734b877fb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.557211] env[69475]: DEBUG nova.scheduler.client.report [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 861.585558] env[69475]: DEBUG nova.compute.manager [req-48162cac-a637-45e1-933d-1480263e783c req-1d1dc009-09f8-4ec8-b4e1-3621e25c92c8 service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Detach interface failed, port_id=8bfd67d5-a8aa-4af9-bef4-2010baea67f2, reason: Instance 2ade2ed6-4725-4913-8ac4-14a96ced3e4b could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 861.622446] env[69475]: DEBUG nova.network.neutron [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.767695] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.767695] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 861.767695] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 861.767900] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 861.767944] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 861.768484] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 861.768764] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 861.768917] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 861.769101] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 861.769272] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 861.769446] env[69475]: DEBUG nova.virt.hardware [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 861.778815] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfiguring VM instance instance-00000035 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 861.778815] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46c54e87-908c-4bd0-9c88-2b8bfe03ccb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.799891] env[69475]: DEBUG 
oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 861.799891] env[69475]: value = "task-3508238" [ 861.799891] env[69475]: _type = "Task" [ 861.799891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.813318] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508238, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.842248] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "619a87e7-097c-41af-8452-5437b82e7ebe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.843823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.843823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.844046] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 861.844204] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.846952] env[69475]: INFO nova.compute.manager [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Terminating instance [ 861.892673] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 
tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Successfully updated port: 9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.949170] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 861.979973] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:35:27Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1397452327',id=24,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-940465052',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 861.980238] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 861.980405] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 861.980570] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 861.980711] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 861.980853] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 861.981070] env[69475]: DEBUG nova.virt.hardware [None 
req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 861.981259] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 861.981398] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 861.981559] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 861.981729] env[69475]: DEBUG nova.virt.hardware [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 861.982729] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42ddc81-87dc-4161-aa1a-15415babf619 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.994257] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508237, 'name': PowerOffVM_Task, 'duration_secs': 0.407463} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.996115] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 861.996449] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 861.996634] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e7b36cae-049d-4117-8655-65bdf06c9060 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.999242] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d91d635-1e5f-4208-97c3-4e844a41434b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.063336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.636s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.063996] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 862.071027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.793s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.071027] env[69475]: DEBUG nova.objects.instance [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lazy-loading 'resources' on Instance uuid b71882d4-537d-4a90-b43d-f8ac4ca0d90c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 862.082150] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.082501] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.082795] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleting the datastore file [datastore1] 93607154-f135-4925-9c3a-a97051535b00 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.083351] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-035f4c38-009b-458a-982d-bf6bedd650de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.092216] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for the task: (returnval){ [ 862.092216] env[69475]: value = "task-3508240" [ 862.092216] env[69475]: _type = "Task" [ 862.092216] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.106388] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508240, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.122734] env[69475]: INFO nova.compute.manager [-] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Took 1.49 seconds to deallocate network for instance. 
[ 862.312234] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508238, 'name': ReconfigVM_Task, 'duration_secs': 0.316353} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.312569] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfigured VM instance instance-00000035 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 862.313406] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ea77a1-64c8-45b1-b1e0-a1a9ea7ac521 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.335975] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 862.336309] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de98a631-9ba8-496a-b479-6dac81f13b63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.356782] env[69475]: DEBUG nova.compute.manager [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 862.357092] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 862.357462] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 862.357462] env[69475]: value = "task-3508241" [ 862.357462] env[69475]: _type = "Task" [ 862.357462] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.358250] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd69dd96-f866-4461-879a-686eea8f4c40 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.368471] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 862.372084] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9729207-a57c-4edf-b2c0-c75e391e7b5f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.374232] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508241, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.378591] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 862.378591] env[69475]: value = "task-3508242" [ 862.378591] env[69475]: _type = "Task" [ 862.378591] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.387887] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508242, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.396642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.396995] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 862.397870] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 862.575599] env[69475]: DEBUG nova.compute.utils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 862.578069] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 862.578069] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 862.603864] env[69475]: DEBUG oslo_vmware.api [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Task: {'id': task-3508240, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.404662} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.604204] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.604431] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.604615] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.604735] env[69475]: INFO nova.compute.manager [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] [instance: 93607154-f135-4925-9c3a-a97051535b00] Took 1.14 seconds to destroy the instance on the hypervisor. [ 862.605009] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 862.605241] env[69475]: DEBUG nova.compute.manager [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 862.605328] env[69475]: DEBUG nova.network.neutron [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 862.630834] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.655257] env[69475]: DEBUG nova.policy [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1dbe602ef9f64662ac728252f5259321', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '572bc56741e24d57a4d01f202c8fb78d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 862.872980] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508241, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.890814] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508242, 'name': PowerOffVM_Task, 'duration_secs': 0.260119} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.891374] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.891374] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.894073] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4748c8b8-72ec-43f5-a19d-ba04653be3f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.954418] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 862.966110] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.966427] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.966884] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Deleting the datastore file [datastore1] 619a87e7-097c-41af-8452-5437b82e7ebe {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.967625] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1fd5aa9-08c8-4f9a-9828-ed24b4e1ef33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.976045] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for the task: (returnval){ [ 862.976045] env[69475]: value = "task-3508244" [ 862.976045] env[69475]: _type = "Task" [ 862.976045] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.986073] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508244, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.080792] env[69475]: DEBUG nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Received event network-vif-deleted-7c0a7b95-70af-4d8c-a572-06b7aa8b7a34 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 863.080792] env[69475]: DEBUG nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Received event network-vif-plugged-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 863.080792] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Acquiring lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.080792] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 863.080792] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.080792] env[69475]: DEBUG nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] No waiting events found dispatching network-vif-plugged-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 863.082037] env[69475]: WARNING nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Received unexpected event network-vif-plugged-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 for instance with vm_state building and task_state spawning. 
[ 863.082469] env[69475]: DEBUG nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Received event network-changed-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 863.082724] env[69475]: DEBUG nova.compute.manager [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Refreshing instance network info cache due to event network-changed-9e1b604b-8b51-4d1d-a716-b433d77aa5a3. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 863.082901] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Acquiring lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.086855] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 863.126895] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1755d1fc-1cc4-4023-bb0b-be260b7bafc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.137007] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff02dc99-b5dd-4b15-9fe0-d37817b1c20f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.175674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901aec8a-39c8-4a8c-87f9-e90630ef1eab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.184706] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b26bc5-c2ef-455c-a2a0-4b3eaee19fc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.200555] env[69475]: DEBUG nova.compute.provider_tree [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 863.216398] env[69475]: DEBUG nova.network.neutron [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updating instance_info_cache with network_info: [{"id": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "address": "fa:16:3e:fa:7c:18", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e1b604b-8b", "ovs_interfaceid": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.374658] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508241, 'name': ReconfigVM_Task, 'duration_secs': 0.611002} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.376077] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b/86464a01-e034-43b6-a6d5-45f9e3b6715b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 863.376077] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.430846] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Successfully created port: 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.486326] env[69475]: DEBUG oslo_vmware.api [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Task: {'id': task-3508244, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.239378} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.486573] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 863.486755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 863.486933] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 863.487119] env[69475]: INFO nova.compute.manager [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Took 1.13 seconds to destroy the instance on the hypervisor. [ 863.487365] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.487558] env[69475]: DEBUG nova.compute.manager [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 863.487653] env[69475]: DEBUG nova.network.neutron [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 863.706365] env[69475]: DEBUG nova.scheduler.client.report [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 863.721367] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 863.721367] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Instance network_info: |[{"id": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "address": "fa:16:3e:fa:7c:18", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e1b604b-8b", "ovs_interfaceid": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 863.721367] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Acquired lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 863.721367] env[69475]: DEBUG nova.network.neutron [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Refreshing network info cache for port 9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 863.721870] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:7c:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e1b604b-8b51-4d1d-a716-b433d77aa5a3', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.735874] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 863.737076] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.737524] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-803f24ee-0a4e-40a2-82c4-9e65a0cc28cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.764040] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.764040] env[69475]: value = "task-3508245" [ 863.764040] env[69475]: _type = "Task" [ 863.764040] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.775120] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508245, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.888639] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b90fc364-31f4-41b3-aca0-48dd6ef83336 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.909487] env[69475]: DEBUG nova.network.neutron [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.911152] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79092c2e-2d90-4d71-aa52-e6bc938249de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.931596] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 863.973443] env[69475]: DEBUG nova.compute.manager [req-2bfbb2e8-0e48-44af-a880-f4cfe626ff87 req-5c8a81b4-6fa7-46b0-9e33-7f4333be814a service nova] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Received event network-vif-deleted-b4a2d38d-faf3-4c3c-9bf1-a9ae9f50da7f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 864.035292] env[69475]: DEBUG nova.network.neutron [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.100774] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 864.128584] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.128831] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 864.128983] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 864.129196] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 864.129343] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 864.129490] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 864.129696] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 864.129853] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 
864.130903] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 864.130903] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 864.130903] env[69475]: DEBUG nova.virt.hardware [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 864.131733] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970a2ae9-e6c8-4f94-9fde-0f1ea4260850 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.139088] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c328841-36f4-4354-ac50-67a852799b1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.216474] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.148s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.218229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.435s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.219383] env[69475]: INFO nova.compute.claims [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 864.243585] env[69475]: INFO nova.scheduler.client.report [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleted allocations for instance b71882d4-537d-4a90-b43d-f8ac4ca0d90c [ 864.279022] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508245, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.414492] env[69475]: INFO nova.compute.manager [-] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Took 0.93 seconds to deallocate network for instance. 
[ 864.475705] env[69475]: DEBUG nova.network.neutron [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Port 858c37b6-4824-46d3-9dff-c0e0d91c47b5 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 864.494283] env[69475]: DEBUG nova.network.neutron [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updated VIF entry in instance network info cache for port 9e1b604b-8b51-4d1d-a716-b433d77aa5a3. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 864.494676] env[69475]: DEBUG nova.network.neutron [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updating instance_info_cache with network_info: [{"id": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "address": "fa:16:3e:fa:7c:18", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e1b604b-8b", "ovs_interfaceid": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.538663] env[69475]: INFO nova.compute.manager [-] [instance: 93607154-f135-4925-9c3a-a97051535b00] Took 1.93 seconds to deallocate network for instance. [ 864.750157] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d7f4c1f-5dd2-473c-8959-53988450e7ca tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "b71882d4-537d-4a90-b43d-f8ac4ca0d90c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.014s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 864.776319] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508245, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.922821] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.961388] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Successfully updated port: 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 864.996867] env[69475]: DEBUG oslo_concurrency.lockutils [req-933245e4-4130-48f3-93e3-b9622a7fc09e req-53d730b6-bfae-4b18-84cc-5b4fabce57cc service nova] Releasing lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.045140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.141286] env[69475]: DEBUG nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: 93607154-f135-4925-9c3a-a97051535b00] Received event network-vif-deleted-29b2e26b-edae-4c53-98e5-15ce643aa4d0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.141520] env[69475]: DEBUG nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.141757] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.142014] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.142251] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.142425] env[69475]: DEBUG nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] No waiting events found dispatching network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 865.142636] env[69475]: WARNING nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received unexpected event network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 for instance with vm_state building and task_state spawning. [ 865.142822] env[69475]: DEBUG nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 865.142980] env[69475]: DEBUG nova.compute.manager [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing instance network info cache due to event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 865.143319] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.143489] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.143667] env[69475]: DEBUG nova.network.neutron [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 865.277907] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508245, 'name': CreateVM_Task, 'duration_secs': 1.397268} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.277907] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.278567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.279260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.279260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 865.279365] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-93eeb8db-b55a-4920-8527-66c804a0fbfe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.286962] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 865.286962] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522905b2-a98c-6423-eabc-3472a043928d" [ 865.286962] env[69475]: _type = "Task" [ 865.286962] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.295119] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522905b2-a98c-6423-eabc-3472a043928d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.463595] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.505120] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.505171] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.505333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.611493] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.611740] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.611937] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.612132] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.612356] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.614343] env[69475]: INFO nova.compute.manager [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Terminating instance [ 865.632863] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-595cee29-c227-4913-9c14-f2b3ab939ea0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.641355] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3dd27c-6a37-467f-b2ff-9bce1300baed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.674850] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6f8adf-5b2a-4977-95da-85e2fea5215c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.677445] env[69475]: DEBUG nova.network.neutron [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 865.684257] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03211e46-0109-4a1f-86b6-5f9a2141426d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.698633] env[69475]: DEBUG nova.compute.provider_tree [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 865.757691] env[69475]: DEBUG nova.network.neutron [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.796718] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522905b2-a98c-6423-eabc-3472a043928d, 'name': SearchDatastore_Task, 'duration_secs': 0.015302} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.796985] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 865.797228] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 865.797462] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.797674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.797777] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 865.798038] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fadbf8e-1fd5-4d98-9d10-8304c34df26c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.806176] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 865.806358] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 865.807054] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6754309d-f298-4c9e-bf21-1e39f1ebdace {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.813244] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 865.813244] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b7e6c-0c9c-d763-e8ad-e492f02d8680" [ 865.813244] env[69475]: _type = "Task" [ 865.813244] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.820549] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b7e6c-0c9c-d763-e8ad-e492f02d8680, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.118749] env[69475]: DEBUG nova.compute.manager [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.119070] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.119910] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded174ba-28cb-42d0-86ce-841ee7eb3e18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.128757] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.128757] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf0f5a8f-277c-4802-bea5-b9d4d2057ace {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.135816] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 866.135816] env[69475]: value = "task-3508246" [ 866.135816] env[69475]: _type = "Task" [ 866.135816] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.146814] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508246, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.202315] env[69475]: DEBUG nova.scheduler.client.report [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 866.260143] env[69475]: DEBUG oslo_concurrency.lockutils [req-97e20c8c-43c4-40ed-a2c1-b454a5c6650e req-62bc9a9b-13a8-43cb-a092-efef6b629952 service nova] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.260675] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.260824] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.323624] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b7e6c-0c9c-d763-e8ad-e492f02d8680, 'name': SearchDatastore_Task, 'duration_secs': 0.00897} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.324291] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb7ac66c-2a5d-47f0-bff7-44451973a3b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.329894] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 866.329894] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5289c769-e8a6-0ade-0372-2e301b9db9eb" [ 866.329894] env[69475]: _type = "Task" [ 866.329894] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.338338] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5289c769-e8a6-0ade-0372-2e301b9db9eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.565781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.566019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 866.566238] env[69475]: DEBUG nova.network.neutron [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 866.645814] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508246, 'name': PowerOffVM_Task, 'duration_secs': 0.218277} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.646075] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.646264] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.646510] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85e243eb-5634-4f8a-921f-71823b866977 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.705912] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.706164] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.706379] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleting the datastore file [datastore2] 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.706646] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd8b97d1-074c-4b83-9698-33a30f0bf6d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.709150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.491s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.709625] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 866.712725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 38.978s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.713074] env[69475]: DEBUG nova.objects.instance [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 866.720189] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for the task: (returnval){ [ 866.720189] env[69475]: value = "task-3508248" [ 866.720189] env[69475]: _type = "Task" [ 866.720189] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.728633] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508248, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.790832] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 866.842348] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5289c769-e8a6-0ade-0372-2e301b9db9eb, 'name': SearchDatastore_Task, 'duration_secs': 0.016187} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.842348] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.842617] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/a87da6e4-d7ec-4624-94bc-b76ade04d511.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 866.843143] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3873d149-a217-4811-9e89-258ac738cffa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.850812] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 866.850812] env[69475]: value = "task-3508249" [ 866.850812] env[69475]: _type = "Task" [ 866.850812] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.858396] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508249, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.929545] env[69475]: DEBUG nova.network.neutron [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.215015] env[69475]: DEBUG nova.compute.utils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 867.216609] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 867.216800] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 867.238412] env[69475]: DEBUG oslo_vmware.api [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Task: {'id': task-3508248, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132308} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.238412] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.238412] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.238608] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.238708] env[69475]: INFO nova.compute.manager [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 1.12 seconds to destroy the instance on the hypervisor. [ 867.238952] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.239198] env[69475]: DEBUG nova.compute.manager [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.239321] env[69475]: DEBUG nova.network.neutron [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.288822] env[69475]: DEBUG nova.policy [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42c54237c534486d86b3a161149fd013', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e760df406d80477a9a7c4d345093d3db', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 867.361494] env[69475]: DEBUG nova.network.neutron [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.369364] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508249, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475955} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.369364] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/a87da6e4-d7ec-4624-94bc-b76ade04d511.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 867.369364] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 867.369364] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca99f581-001d-4bed-a625-c4ac32d7d0db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.376843] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 867.376843] env[69475]: value = "task-3508250" [ 867.376843] env[69475]: _type = "Task" [ 867.376843] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.385711] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508250, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.435256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.435256] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance network_info: |[{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 867.435256] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:ef:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.444490] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating folder: Project (572bc56741e24d57a4d01f202c8fb78d). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.445638] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-544f301e-3a1c-4d94-bbf3-9c3715760e38 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.456292] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created folder: Project (572bc56741e24d57a4d01f202c8fb78d) in parent group-v700823. [ 867.456860] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating folder: Instances. Parent ref: group-v701011. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 867.457264] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f658fa0-2595-45af-be48-67c19be42b91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.468828] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created folder: Instances in parent group-v701011. [ 867.468828] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.468828] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.468828] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-552f5a8b-a68a-4dc7-a8f1-8fdd3191525b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.490021] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.490021] env[69475]: value = "task-3508253" [ 867.490021] env[69475]: _type = "Task" [ 867.490021] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.496750] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.652927] env[69475]: DEBUG nova.compute.manager [req-b9507b3a-bd55-463b-9bf0-63af38830d3e req-92ff7257-6c8c-4e1a-bafc-1dbc0e0f664a service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Received event network-vif-deleted-8bc8f5fa-c3ec-45d0-bbd5-84002529188f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.653619] env[69475]: INFO nova.compute.manager [req-b9507b3a-bd55-463b-9bf0-63af38830d3e req-92ff7257-6c8c-4e1a-bafc-1dbc0e0f664a service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Neutron deleted interface 8bc8f5fa-c3ec-45d0-bbd5-84002529188f; detaching it from the instance and deleting it from the info cache [ 867.653865] env[69475]: DEBUG nova.network.neutron [req-b9507b3a-bd55-463b-9bf0-63af38830d3e req-92ff7257-6c8c-4e1a-bafc-1dbc0e0f664a service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.726335] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 867.731301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6893bb41-724d-4240-98be-095e3fb886d0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.732543] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.944s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.734192] env[69475]: INFO nova.compute.claims [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.737731] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Successfully created port: 2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 867.864781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 867.883145] env[69475]: DEBUG oslo_vmware.api [None 
req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508250, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058555} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.883845] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.884655] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d55f4c8-79e0-44f0-9041-8810fc8e9baa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.910087] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/a87da6e4-d7ec-4624-94bc-b76ade04d511.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.910408] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b4154d9-3e18-4b04-a3da-c5715b640747 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.933021] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 867.933021] env[69475]: value = "task-3508254" [ 867.933021] env[69475]: _type = "Task" [ 867.933021] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.940015] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508254, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.002736] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508253, 'name': CreateVM_Task, 'duration_secs': 0.355152} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.002935] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.003671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.003897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.004225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 868.004475] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e71f2a-4d1b-4267-9832-c317991ea80d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.009041] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 868.009041] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ff2fe8-f3cf-d6af-bef0-35962844bffa" [ 868.009041] env[69475]: _type = "Task" [ 868.009041] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.017023] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ff2fe8-f3cf-d6af-bef0-35962844bffa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.044608] env[69475]: DEBUG nova.network.neutron [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.157029] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd92256a-82e6-4536-aa94-d7da66b71a86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.166380] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db848452-9042-4f36-8043-6d21c84760de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.202197] env[69475]: DEBUG nova.compute.manager [req-b9507b3a-bd55-463b-9bf0-63af38830d3e req-92ff7257-6c8c-4e1a-bafc-1dbc0e0f664a service nova] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Detach interface failed, port_id=8bc8f5fa-c3ec-45d0-bbd5-84002529188f, reason: Instance 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 868.205952] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "7be48799-ea4a-4e7f-95c2-637460596cfc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.206215] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.206436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.206624] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 868.206803] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 868.209627] env[69475]: INFO nova.compute.manager [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Terminating instance [ 868.395198] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67684b38-3a1c-4998-8db9-3aee726e4469 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.415253] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7a14ee-f633-4d12-8123-8fd8064d72ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.421858] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 868.440624] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508254, 'name': ReconfigVM_Task, 'duration_secs': 0.382298} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.440883] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Reconfigured VM instance instance-00000040 to attach disk [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/a87da6e4-d7ec-4624-94bc-b76ade04d511.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.441211] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=69475) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 868.441842] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-28d2a741-70a5-4b34-b696-49215d8d2198 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.449310] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 868.449310] env[69475]: value = "task-3508255" [ 868.449310] env[69475]: _type = "Task" [ 868.449310] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.458271] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508255, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.518779] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ff2fe8-f3cf-d6af-bef0-35962844bffa, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.519119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 868.519333] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 868.519570] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.519713] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 868.519892] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 868.520168] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fddd2a8b-fb3f-4160-8019-8f44fed82bfb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.527939] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore2] 
devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 868.528130] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 868.528830] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55055472-f267-407e-a2d7-d5240b5b240c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.533644] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 868.533644] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab8c00-5495-18a7-5ae4-a634ee7eda47" [ 868.533644] env[69475]: _type = "Task" [ 868.533644] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.541677] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab8c00-5495-18a7-5ae4-a634ee7eda47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.549203] env[69475]: INFO nova.compute.manager [-] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Took 1.31 seconds to deallocate network for instance. [ 868.716178] env[69475]: DEBUG nova.compute.manager [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 868.716440] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 868.718138] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95f1132-99ac-409d-adb3-0f9ff20621b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.725993] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.726236] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f8982b7-7881-4e46-a982-8514c80b8ffb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.733057] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 868.733057] env[69475]: value = "task-3508256" [ 868.733057] env[69475]: _type = "Task" [ 868.733057] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.738595] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 868.746099] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3508256, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.771391] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 868.771638] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 868.771793] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 868.771970] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 868.772131] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 868.772310] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 868.772598] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 868.772781] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 868.772952] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 868.773131] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 868.773316] env[69475]: DEBUG nova.virt.hardware [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 868.774216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12706ab1-01a5-4f32-8c63-098fd11151ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.785172] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c19fb3b9-6a8a-46a0-b3eb-268eee94ac7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.928050] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 868.930626] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03ec9f3f-93b1-4d23-9417-9dafa93180a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.937740] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 868.937740] env[69475]: value = "task-3508257" [ 868.937740] env[69475]: _type = "Task" [ 868.937740] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.948505] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508257, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.960702] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508255, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.051287} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.961311] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=69475) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 868.962347] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bf48acb-251e-4970-952a-8faedda973c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.987738] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/ephemeral_0.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.990686] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c19d06c-b11a-4131-bb21-52c7089e4912 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.008531] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 869.008531] env[69475]: value = "task-3508258" [ 869.008531] env[69475]: _type = "Task" [ 869.008531] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.017587] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508258, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.047400] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ab8c00-5495-18a7-5ae4-a634ee7eda47, 'name': SearchDatastore_Task, 'duration_secs': 0.008443} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.048186] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f93f9e98-97ae-407e-833a-355df7ee91f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.055093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.056112] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 869.056112] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524a7f8f-971b-0bd2-d33f-cb8548dcf1bb" [ 869.056112] env[69475]: _type = "Task" [ 869.056112] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.063787] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524a7f8f-971b-0bd2-d33f-cb8548dcf1bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.190273] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567c3bce-f3f2-4651-b3f3-6325764526e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.198686] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b855dd-86eb-47bf-a2ae-47184822e56c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.241270] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e595683-5762-4f6e-a01a-068abdc2e6b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.249543] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3508256, 'name': PowerOffVM_Task, 'duration_secs': 0.219017} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.251985] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.252192] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.252509] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96f3b742-02ae-4259-ab58-61b28b217413 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.254992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b77d0f0a-c6c8-4b05-8a45-7e1d92c28abf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.271461] env[69475]: DEBUG nova.compute.provider_tree [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.306225] env[69475]: DEBUG nova.compute.manager [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Received event network-vif-plugged-2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.306491] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] Acquiring lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.306741] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.306979] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 869.307184] env[69475]: DEBUG nova.compute.manager [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] No waiting events found dispatching 
network-vif-plugged-2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 869.307411] env[69475]: WARNING nova.compute.manager [req-7a7497a2-d212-49d9-b81c-5cd02696c862 req-6ccc2601-1dd4-47b7-ba16-511e6ad394fb service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Received unexpected event network-vif-plugged-2ba8c01f-b78c-4077-bb73-ff63d385807e for instance with vm_state building and task_state spawning. [ 869.324067] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.324484] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.324600] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Deleting the datastore file [datastore1] 7be48799-ea4a-4e7f-95c2-637460596cfc {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.324774] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d84ee789-a381-4be4-9c35-af0b25444e39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.332155] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for the task: (returnval){ [ 869.332155] env[69475]: value = "task-3508260" [ 869.332155] env[69475]: _type = "Task" [ 869.332155] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.340736] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3508260, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.404334] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Successfully updated port: 2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 869.449565] env[69475]: DEBUG oslo_vmware.api [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508257, 'name': PowerOnVM_Task, 'duration_secs': 0.397769} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.449833] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 869.450023] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-035c660f-42e0-4d12-b55d-62cbd0898cf5 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance '86464a01-e034-43b6-a6d5-45f9e3b6715b' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 869.521095] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508258, 'name': ReconfigVM_Task, 'duration_secs': 0.389039} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.521438] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Reconfigured VM instance instance-00000040 to attach disk [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511/ephemeral_0.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.522445] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-88c216fb-b0dc-4c55-9c7b-f34dc369f5d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.530138] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 869.530138] env[69475]: value = "task-3508261" [ 869.530138] env[69475]: _type = "Task" [ 869.530138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.537794] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508261, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.569016] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524a7f8f-971b-0bd2-d33f-cb8548dcf1bb, 'name': SearchDatastore_Task, 'duration_secs': 0.009764} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.569016] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 869.569016] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 869.569016] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-06438fe2-e1ba-4731-a26d-d66a313fb69e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.573845] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 869.573845] env[69475]: value = "task-3508262" [ 869.573845] env[69475]: _type = "Task" [ 869.573845] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.581864] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.775721] env[69475]: DEBUG nova.scheduler.client.report [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.843312] env[69475]: DEBUG oslo_vmware.api [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Task: {'id': task-3508260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155442} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.843615] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 869.843787] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 869.843986] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 869.844183] env[69475]: INFO nova.compute.manager [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 869.845563] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 869.845563] env[69475]: DEBUG nova.compute.manager [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 869.845563] env[69475]: DEBUG nova.network.neutron [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 869.906954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.907319] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 869.907392] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.039587] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508261, 'name': Rename_Task, 'duration_secs': 0.181254} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.040164] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 870.040639] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b9b5f1ea-f300-4e9d-93fb-0a02d52e8379 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.050600] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 870.050600] env[69475]: value = "task-3508263" [ 870.050600] env[69475]: _type = "Task" [ 870.050600] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.062438] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.089344] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508262, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.281164] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.548s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.281589] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 870.284648] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.448s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.284938] env[69475]: DEBUG nova.objects.instance [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lazy-loading 'resources' on Instance uuid e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.452871] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 870.568749] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508263, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.584026] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508262, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.634099] env[69475]: DEBUG nova.network.neutron [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Updating instance_info_cache with network_info: [{"id": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "address": "fa:16:3e:7b:bd:14", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba8c01f-b7", "ovs_interfaceid": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.795740] env[69475]: DEBUG nova.compute.utils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.798141] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 870.798322] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 870.838169] env[69475]: DEBUG nova.policy [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d4323c195b24245a75109e165f900f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6dd9c026624896ae4de7fab35720d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 871.062013] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508263, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.073748] env[69475]: DEBUG nova.network.neutron [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.090031] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508262, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.497028} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.091268] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.091268] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.091268] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7b609250-6fca-46f8-b8b1-f8ca3112e143 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.097553] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 871.097553] env[69475]: value = "task-3508264" [ 871.097553] env[69475]: _type = "Task" [ 871.097553] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.108522] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508264, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.137091] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Successfully created port: 1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.139255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 871.139927] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Instance network_info: |[{"id": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "address": "fa:16:3e:7b:bd:14", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba8c01f-b7", "ovs_interfaceid": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 871.140381] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:bd:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '424fd631-4456-4ce2-8924-a2ed81d60bd6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2ba8c01f-b78c-4077-bb73-ff63d385807e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 871.149181] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 871.153795] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 871.154192] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5ac7b4ff-289e-4d83-9a66-f490db0d9a23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.177879] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 871.177879] env[69475]: value = "task-3508265" [ 871.177879] env[69475]: _type = "Task" [ 871.177879] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.193697] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508265, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.298861] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.306025] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd799cb6-4230-48c8-8d47-5dfa5a03dcdf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.312638] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9be7223d-0385-4067-aa1a-c71cf0f35f4d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.356232] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ae6b6e8-0991-4723-ad5e-52361cc07de5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.364337] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed5717e4-3cfb-441e-aa22-d7386a8efcdd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.379548] env[69475]: DEBUG nova.compute.provider_tree [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.441138] env[69475]: DEBUG nova.compute.manager [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Received event network-changed-2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.441269] env[69475]: DEBUG nova.compute.manager [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Refreshing instance network info cache due to event 
network-changed-2ba8c01f-b78c-4077-bb73-ff63d385807e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 871.441375] env[69475]: DEBUG oslo_concurrency.lockutils [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] Acquiring lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.441526] env[69475]: DEBUG oslo_concurrency.lockutils [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] Acquired lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.442224] env[69475]: DEBUG nova.network.neutron [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Refreshing network info cache for port 2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 871.561921] env[69475]: DEBUG oslo_vmware.api [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508263, 'name': PowerOnVM_Task, 'duration_secs': 1.248891} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.562640] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.562983] env[69475]: INFO nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Took 9.61 seconds to spawn the instance on the hypervisor. [ 871.563351] env[69475]: DEBUG nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 871.564225] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eee73ec-ba23-48c6-8a18-a46a575664ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.576725] env[69475]: INFO nova.compute.manager [-] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Took 1.73 seconds to deallocate network for instance. [ 871.607474] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.104355} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.607750] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.608828] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e823fc96-6be1-4c91-b49c-97dcdffbf972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.631770] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.632393] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a024303-aa68-4143-9ec2-7c1782ccd81a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.651900] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 871.651900] env[69475]: value = "task-3508266" [ 871.651900] env[69475]: _type = "Task" [ 871.651900] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.659301] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508266, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.690010] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508265, 'name': CreateVM_Task, 'duration_secs': 0.347089} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.690208] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.691090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.691285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 871.691683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 871.691973] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80275625-5518-429e-805b-439fd79a4a98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.697268] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 871.697268] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5220e2f9-ddf9-b1ec-c734-7f4ec6bda3c4" [ 871.697268] env[69475]: _type = "Task" [ 871.697268] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.705712] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5220e2f9-ddf9-b1ec-c734-7f4ec6bda3c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.882468] env[69475]: DEBUG nova.scheduler.client.report [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 872.087095] env[69475]: INFO nova.compute.manager [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Took 56.76 seconds to build instance. [ 872.089834] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.161484] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508266, 'name': ReconfigVM_Task, 'duration_secs': 0.325452} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.161760] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfigured VM instance instance-00000041 to attach disk [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.162421] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-643311dc-a767-4e52-8fd2-a3f8fc4278fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.170754] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 872.170754] env[69475]: value = "task-3508267" [ 872.170754] env[69475]: _type = "Task" [ 872.170754] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.178589] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508267, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.210929] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5220e2f9-ddf9-b1ec-c734-7f4ec6bda3c4, 'name': SearchDatastore_Task, 'duration_secs': 0.00981} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.211253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.212176] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.212176] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.212176] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.212176] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.212357] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-19ea6a9c-dcba-4f89-bd2f-db8257e436d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.221174] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.221973] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.222171] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30fc9a81-e990-45f1-a9cc-f395b701926e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.227504] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 872.227504] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dde0ba-3c59-19a0-52e3-094d6ad4e7eb" [ 872.227504] env[69475]: _type = "Task" [ 872.227504] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.236023] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dde0ba-3c59-19a0-52e3-094d6ad4e7eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.238906] env[69475]: DEBUG nova.network.neutron [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Updated VIF entry in instance network info cache for port 2ba8c01f-b78c-4077-bb73-ff63d385807e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 872.239256] env[69475]: DEBUG nova.network.neutron [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Updating instance_info_cache with network_info: [{"id": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "address": "fa:16:3e:7b:bd:14", "network": {"id": "36231912-6ad2-4f94-b3f0-3e1c47b777fe", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-440948387-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e760df406d80477a9a7c4d345093d3db", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "424fd631-4456-4ce2-8924-a2ed81d60bd6", "external-id": "nsx-vlan-transportzone-19", "segmentation_id": 19, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2ba8c01f-b7", "ovs_interfaceid": "2ba8c01f-b78c-4077-bb73-ff63d385807e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.312095] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.314296] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.314574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.314797] env[69475]: DEBUG nova.compute.manager [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Going to confirm migration 2 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 872.343891] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 872.344393] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 872.344393] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 872.344499] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 872.344618] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:406}} [ 872.344768] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 872.344974] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 872.345145] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 872.345739] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 872.345992] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 872.346215] env[69475]: DEBUG nova.virt.hardware [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 872.347152] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c0a56f-407d-4600-8580-c00da168daf8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.357416] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070eeb39-31af-4ede-a616-4e148d11e6bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.387933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.390787] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.212s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.391078] env[69475]: DEBUG nova.objects.instance [None 
req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lazy-loading 'resources' on Instance uuid d1a316d5-59ef-4286-9d7e-a444ffadc49d {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.418938] env[69475]: INFO nova.scheduler.client.report [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted allocations for instance e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d [ 872.591848] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f22ac1ff-af2f-4a1c-8072-a5b951eb322f tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.707s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.682301] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508267, 'name': Rename_Task, 'duration_secs': 0.23501} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.682617] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 872.682925] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e01eacc0-a721-4c7d-bf58-b8fd3366ffdd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.692979] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 872.692979] env[69475]: value = "task-3508268" [ 872.692979] env[69475]: _type = "Task" [ 872.692979] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.701485] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508268, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.735158] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Successfully updated port: 1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 872.743304] env[69475]: DEBUG oslo_concurrency.lockutils [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] Releasing lock "refresh_cache-980bb0eb-121c-4703-a453-fb0b4351e9e3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.743824] env[69475]: DEBUG nova.compute.manager [req-179f2d79-be3b-4e82-810a-1283ad881a5e req-7d58698e-3500-4025-bbb4-a5192153b704 service nova] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Received event network-vif-deleted-56faa0c7-80a1-46f5-8167-4485e94846ea {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.744788] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dde0ba-3c59-19a0-52e3-094d6ad4e7eb, 'name': SearchDatastore_Task, 'duration_secs': 0.01205} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.746442] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b05704d5-95ac-4a2d-85df-fff1fe80975e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.752411] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 872.752411] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d758d4-963f-a8ca-b485-d395747dbe00" [ 872.752411] env[69475]: _type = "Task" [ 872.752411] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.761995] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d758d4-963f-a8ca-b485-d395747dbe00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.786361] env[69475]: DEBUG nova.compute.manager [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Received event network-vif-plugged-1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 872.786361] env[69475]: DEBUG oslo_concurrency.lockutils [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] Acquiring lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 872.786361] env[69475]: DEBUG oslo_concurrency.lockutils [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 872.786361] env[69475]: DEBUG oslo_concurrency.lockutils [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 872.786361] env[69475]: DEBUG nova.compute.manager [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] No waiting events found dispatching network-vif-plugged-1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 872.786361] env[69475]: WARNING nova.compute.manager [req-9c352f9d-0314-43d3-b6c2-0a5479e1d426 req-731fecde-e14c-48a3-85eb-60b27bdcd010 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Received unexpected event network-vif-plugged-1bb10344-b9bf-42e7-9ee2-2b246b9975fa for instance with vm_state building and task_state spawning. 
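The lockutils entries above show the per-instance "<uuid>-events" lock being taken, the waiting-event lookup coming back empty, and the "Received unexpected event" warning being emitted. A minimal sketch of that lock-then-dispatch pattern follows; it assumes oslo.concurrency is installed, and the event store and handler names are hypothetical illustrations of the pattern, not Nova's actual implementation.

```python
# Minimal sketch of the "acquire per-instance lock / pop waiter / warn if none" pattern
# reflected in the lockutils entries above. Assumes oslo.concurrency is installed; the
# event store and handler below are hypothetical, not Nova's real code.
import logging

from oslo_concurrency import lockutils

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)

# Hypothetical per-instance registry of callbacks waiting for external events.
_pending_events = {}  # {instance_uuid: {event_name: callback}}


def pop_instance_event(instance_uuid, event_name):
    """Pop a registered waiter for (instance, event) under a per-instance lock."""
    lock_name = f"{instance_uuid}-events"
    with lockutils.lock(lock_name):
        waiters = _pending_events.get(instance_uuid, {})
        return waiters.pop(event_name, None)


def handle_external_event(instance_uuid, event_name):
    """Dispatch an external event, warning when nothing was waiting for it."""
    callback = pop_instance_event(instance_uuid, event_name)
    if callback is None:
        # Mirrors the WARNING above: the event arrived before anyone registered interest.
        LOG.warning("Received unexpected event %s for instance %s",
                    event_name, instance_uuid)
        return
    callback(event_name)


if __name__ == "__main__":
    # With no registered waiter, this logs the "unexpected event" warning.
    handle_external_event("eadfea6c-3fce-4f54-b889-d994d61ec14f",
                          "network-vif-plugged-1bb10344-b9bf-42e7-9ee2-2b246b9975fa")
```

In the trace this situation is benign: the instance is still in vm_state building / task_state spawning, so the network-vif-plugged event simply arrived before the compute manager registered a waiter for it.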
[ 872.903222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.903423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.903605] env[69475]: DEBUG nova.network.neutron [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 872.903790] env[69475]: DEBUG nova.objects.instance [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'info_cache' on Instance uuid 86464a01-e034-43b6-a6d5-45f9e3b6715b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.932175] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2d1da5a0-7a99-402b-b77e-b0c3ef5a7e8f tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.329s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.204827] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508268, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.238026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.238146] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.238263] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 873.267620] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d758d4-963f-a8ca-b485-d395747dbe00, 'name': SearchDatastore_Task, 'duration_secs': 0.028782} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.268387] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.268387] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 980bb0eb-121c-4703-a453-fb0b4351e9e3/980bb0eb-121c-4703-a453-fb0b4351e9e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.268548] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ee302996-e7bb-4036-9039-63682b5d5c29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.275317] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 873.275317] env[69475]: value = "task-3508269" [ 873.275317] env[69475]: _type = "Task" [ 873.275317] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.287173] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508269, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.373200] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ad62f5-a52a-42da-a45b-6394481fa476 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.382196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20507d1e-ddf8-431d-9c0e-d28b2c7b0113 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.439654] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b9c072-2771-47d8-b351-a6c80c25cbff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.448705] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f936aa08-fa0c-4b73-9e33-3c443fbb2592 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.463967] env[69475]: DEBUG nova.compute.provider_tree [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.516291] env[69475]: DEBUG nova.compute.manager [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Received event network-changed-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 873.516498] env[69475]: DEBUG nova.compute.manager [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Refreshing instance network info cache due to event network-changed-9e1b604b-8b51-4d1d-a716-b433d77aa5a3. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 873.516731] env[69475]: DEBUG oslo_concurrency.lockutils [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] Acquiring lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.516850] env[69475]: DEBUG oslo_concurrency.lockutils [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] Acquired lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 873.517016] env[69475]: DEBUG nova.network.neutron [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Refreshing network info cache for port 9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 873.702715] env[69475]: DEBUG oslo_vmware.api [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508268, 'name': PowerOnVM_Task, 'duration_secs': 0.580105} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.703013] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 873.703205] env[69475]: INFO nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Took 9.60 seconds to spawn the instance on the hypervisor. [ 873.703418] env[69475]: DEBUG nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 873.704297] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60934c1f-a764-4c2d-b579-ea7698aa70e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.776427] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 873.787803] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508269, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474814} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.788134] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 980bb0eb-121c-4703-a453-fb0b4351e9e3/980bb0eb-121c-4703-a453-fb0b4351e9e3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.788367] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.788619] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d048e0ab-672f-4ccf-91b4-4cff5801275b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.796368] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 873.796368] env[69475]: value = "task-3508270" [ 873.796368] env[69475]: _type = "Task" [ 873.796368] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.806846] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508270, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.938640] env[69475]: DEBUG nova.network.neutron [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Updating instance_info_cache with network_info: [{"id": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "address": "fa:16:3e:8c:7f:fe", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bb10344-b9", "ovs_interfaceid": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.967621] env[69475]: DEBUG nova.scheduler.client.report [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 874.146810] env[69475]: DEBUG nova.network.neutron [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [{"id": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "address": "fa:16:3e:8e:8b:e4", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": 
"nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap858c37b6-48", "ovs_interfaceid": "858c37b6-4824-46d3-9dff-c0e0d91c47b5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.224212] env[69475]: INFO nova.compute.manager [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Took 53.30 seconds to build instance. [ 874.268631] env[69475]: DEBUG nova.network.neutron [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updated VIF entry in instance network info cache for port 9e1b604b-8b51-4d1d-a716-b433d77aa5a3. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 874.269918] env[69475]: DEBUG nova.network.neutron [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updating instance_info_cache with network_info: [{"id": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "address": "fa:16:3e:fa:7c:18", "network": {"id": "c35eed50-417f-4eee-92d8-63f9c06d148f", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-49170861-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.147", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "947a74cfc69b45dbb3aa09060c5b76f4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e1b604b-8b", "ovs_interfaceid": "9e1b604b-8b51-4d1d-a716-b433d77aa5a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.305616] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508270, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073382} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.305866] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.306637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850516c3-85d8-4c02-bf19-e9cd41053265 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.328428] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 980bb0eb-121c-4703-a453-fb0b4351e9e3/980bb0eb-121c-4703-a453-fb0b4351e9e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.328670] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab45bb45-12cb-429a-9a05-6c11e764bdf4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.347544] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 874.347544] env[69475]: value = "task-3508271" [ 874.347544] env[69475]: _type = "Task" [ 874.347544] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.355342] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508271, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.441628] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.442040] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Instance network_info: |[{"id": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "address": "fa:16:3e:8c:7f:fe", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bb10344-b9", "ovs_interfaceid": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 874.442555] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8c:7f:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1bb10344-b9bf-42e7-9ee2-2b246b9975fa', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.451157] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 874.451429] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.451698] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0158d1be-d684-4b1d-a1d2-c0d5b355c925 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.473199] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.476131] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.476131] env[69475]: value = "task-3508272" [ 874.476131] env[69475]: _type = "Task" [ 874.476131] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.476131] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.528s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.476131] env[69475]: DEBUG nova.objects.instance [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lazy-loading 'resources' on Instance uuid 5e3e57c5-8367-493f-8268-a0e496c8c878 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.487820] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508272, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.504895] env[69475]: INFO nova.scheduler.client.report [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Deleted allocations for instance d1a316d5-59ef-4286-9d7e-a444ffadc49d [ 874.650554] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-86464a01-e034-43b6-a6d5-45f9e3b6715b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.650750] env[69475]: DEBUG nova.objects.instance [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'migration_context' on Instance uuid 86464a01-e034-43b6-a6d5-45f9e3b6715b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.726618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-385aa597-1bbc-4f35-b43d-6e16ca305040 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.626s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.772455] env[69475]: DEBUG oslo_concurrency.lockutils [req-58fcfed7-4b69-43b2-9fbf-448448e3fd8f req-52be4e5c-67ed-40fe-bb9f-91778b42e520 service nova] Releasing lock "refresh_cache-a87da6e4-d7ec-4624-94bc-b76ade04d511" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 874.857431] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.878930] env[69475]: DEBUG nova.compute.manager [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Received event network-changed-1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 874.879950] env[69475]: DEBUG nova.compute.manager [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Refreshing instance network info cache due to event network-changed-1bb10344-b9bf-42e7-9ee2-2b246b9975fa. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 874.879950] env[69475]: DEBUG oslo_concurrency.lockutils [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] Acquiring lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.879950] env[69475]: DEBUG oslo_concurrency.lockutils [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] Acquired lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.879950] env[69475]: DEBUG nova.network.neutron [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Refreshing network info cache for port 1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 874.995185] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508272, 'name': CreateVM_Task, 'duration_secs': 0.362496} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.995364] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 874.996092] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.996285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.996610] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 874.996865] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01855d78-a658-4be6-8acb-46913975c9d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.001624] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 875.001624] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d440-9f8a-d57c-a153-d2a26fd59bfa" [ 875.001624] env[69475]: _type = "Task" [ 875.001624] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.012572] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d440-9f8a-d57c-a153-d2a26fd59bfa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.013018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d6dd6619-e369-4e80-bee2-87f3c58f8d0f tempest-SecurityGroupsTestJSON-2140826043 tempest-SecurityGroupsTestJSON-2140826043-project-member] Lock "d1a316d5-59ef-4286-9d7e-a444ffadc49d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.463s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 875.153528] env[69475]: DEBUG nova.objects.base [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Object Instance<86464a01-e034-43b6-a6d5-45f9e3b6715b> lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 875.154456] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3807d3f1-548b-4397-93d6-2f38fcd915ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.185313] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-073bafe7-d8c7-4b89-9318-a4b625bb5dc6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.190521] env[69475]: DEBUG oslo_vmware.api [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 875.190521] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52810084-fea9-2436-bb1b-1c7e3b541b90" [ 875.190521] env[69475]: _type = "Task" [ 875.190521] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.198627] env[69475]: DEBUG oslo_vmware.api [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52810084-fea9-2436-bb1b-1c7e3b541b90, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.365130] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508271, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.431451] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-424c2697-5311-4df2-85d0-833abcd6719d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.438803] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e57790-e93b-4d98-b0f8-b04ac3ef3d1a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.493928] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0bfb428-b6bb-4f78-9542-cb2b6e619a11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.510733] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1beee89-a328-4c3d-b373-a259766c6e38 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.531302] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d440-9f8a-d57c-a153-d2a26fd59bfa, 'name': SearchDatastore_Task, 'duration_secs': 0.016379} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.537038] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.537334] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.537541] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.537688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.537867] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.538341] env[69475]: DEBUG nova.compute.provider_tree [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.539669] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-311b607a-3db4-4418-a9ca-de3a93ba0270 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.551110] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.551314] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.552829] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f08f23f3-9a51-4288-b565-a158d4cfe9d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.558901] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 875.558901] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5213341b-1c46-0f23-7f4f-8f5470214490" [ 875.558901] env[69475]: _type = "Task" [ 875.558901] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.568860] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5213341b-1c46-0f23-7f4f-8f5470214490, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.624183] env[69475]: DEBUG nova.compute.manager [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.624378] env[69475]: DEBUG nova.compute.manager [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing instance network info cache due to event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 875.624623] env[69475]: DEBUG oslo_concurrency.lockutils [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.624726] env[69475]: DEBUG oslo_concurrency.lockutils [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.627234] env[69475]: DEBUG nova.network.neutron [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.701027] env[69475]: DEBUG oslo_vmware.api [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52810084-fea9-2436-bb1b-1c7e3b541b90, 'name': SearchDatastore_Task, 'duration_secs': 0.007692} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.702815] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 875.706694] env[69475]: DEBUG nova.network.neutron [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Updated VIF entry in instance network info cache for port 1bb10344-b9bf-42e7-9ee2-2b246b9975fa. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 875.707016] env[69475]: DEBUG nova.network.neutron [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Updating instance_info_cache with network_info: [{"id": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "address": "fa:16:3e:8c:7f:fe", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bb10344-b9", "ovs_interfaceid": "1bb10344-b9bf-42e7-9ee2-2b246b9975fa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.865658] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508271, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.045985] env[69475]: DEBUG nova.scheduler.client.report [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 876.078021] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5213341b-1c46-0f23-7f4f-8f5470214490, 'name': SearchDatastore_Task, 'duration_secs': 0.017325} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.078021] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e4a35d8-7956-4436-ba80-8cb18e10d5de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.083854] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 876.083854] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a3310b-dc42-6555-f1cf-6f034f85d736" [ 876.083854] env[69475]: _type = "Task" [ 876.083854] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.094137] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a3310b-dc42-6555-f1cf-6f034f85d736, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.215461] env[69475]: DEBUG oslo_concurrency.lockutils [req-45cf66c0-2780-485b-bbc8-0543de00b5f1 req-6b7fb71f-77e4-443e-b5bf-7249b23d58b4 service nova] Releasing lock "refresh_cache-eadfea6c-3fce-4f54-b889-d994d61ec14f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.369191] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508271, 'name': ReconfigVM_Task, 'duration_secs': 1.653971} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.369191] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 980bb0eb-121c-4703-a453-fb0b4351e9e3/980bb0eb-121c-4703-a453-fb0b4351e9e3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 876.369191] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58972d8f-0a87-4d0e-8623-ae601d3999b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.375538] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 876.375538] env[69475]: value = "task-3508273" [ 876.375538] env[69475]: _type = "Task" [ 876.375538] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.384684] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508273, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.555122] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.076s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.560046] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 40.216s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.599958] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a3310b-dc42-6555-f1cf-6f034f85d736, 'name': SearchDatastore_Task, 'duration_secs': 0.009868} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.599958] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.599958] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] eadfea6c-3fce-4f54-b889-d994d61ec14f/eadfea6c-3fce-4f54-b889-d994d61ec14f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 876.599958] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c17a1653-37dc-4246-829f-77f8b4869161 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.606421] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 876.606421] env[69475]: value = "task-3508274" [ 876.606421] env[69475]: _type = "Task" [ 876.606421] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.619983] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508274, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.624268] env[69475]: INFO nova.scheduler.client.report [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleted allocations for instance 5e3e57c5-8367-493f-8268-a0e496c8c878 [ 876.722064] env[69475]: DEBUG nova.network.neutron [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updated VIF entry in instance network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.722499] env[69475]: DEBUG nova.network.neutron [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.886040] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508273, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.117704] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508274, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.132954] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b16e27f-1972-49c6-848d-92615d048b2c tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 46.579s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.134021] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 42.785s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.134259] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 877.134546] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 877.134726] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 877.136446] env[69475]: INFO nova.compute.manager [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Terminating instance [ 877.226785] env[69475]: DEBUG oslo_concurrency.lockutils [req-93af5d11-89a3-4421-93ef-048ae940f4a2 req-f93ea28a-7de8-4c89-9158-32238ef868ff service nova] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.384654] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508273, 'name': Rename_Task, 'duration_secs': 0.737503} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.385169] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.385243] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0715195b-2453-4fc0-b18b-9003d39239f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.392628] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 877.392628] env[69475]: value = "task-3508275" [ 877.392628] env[69475]: _type = "Task" [ 877.392628] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.401919] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508275, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.582103] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Applying migration context for instance 86464a01-e034-43b6-a6d5-45f9e3b6715b as it has an incoming, in-progress migration 7e88bc9f-10f3-40da-8081-c14e8c051ac4. Migration status is confirming {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 877.584152] env[69475]: INFO nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating resource usage from migration 7e88bc9f-10f3-40da-8081-c14e8c051ac4 [ 877.616852] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508274, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.599592} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.617919] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 7be48799-ea4a-4e7f-95c2-637460596cfc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.618071] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 93607154-f135-4925-9c3a-a97051535b00 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 877.618120] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 235653ac-a893-4f42-a394-dd81f61f0d73 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.618222] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance a21ec73a-2658-4fc6-9bc1-0e492385d59e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.618342] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d1e5e08d-b41a-4655-997d-91fbd3581f00 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.618459] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b41845c6-46bd-4b3b-ab26-d7d2dad08f84 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.618586] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.618700] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance baf27027-678d-4167-bb9b-df410aeb0e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.618823] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8bea34ef-0caf-4cdb-a689-dd747d9b52ea is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.618944] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2b0cc71c-862e-4eb0-afc4-b2125003b087 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.619119] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e960f967-d693-4ea8-9390-8b0232941c58 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 877.619290] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.619418] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8cc0636c-84af-4f68-bec8-1493b421a605 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.619535] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 86647493-8b2c-46bd-94d3-c973e843f778 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.619646] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4b3b53d1-82bf-40e7-9988-af7b51e9883a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.619756] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.619908] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 56f0e59a-1c37-4977-81dc-da1a274ce7e7 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.620040] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2ade2ed6-4725-4913-8ac4-14a96ced3e4b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 877.620162] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 619a87e7-097c-41af-8452-5437b82e7ebe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 877.620288] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance a87da6e4-d7ec-4624-94bc-b76ade04d511 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.620403] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e8c2d21e-2e42-48de-928e-c5fd944899b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.620621] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Migration 7e88bc9f-10f3-40da-8081-c14e8c051ac4 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 877.620782] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 86464a01-e034-43b6-a6d5-45f9e3b6715b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.620901] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 980bb0eb-121c-4703-a453-fb0b4351e9e3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.621013] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance eadfea6c-3fce-4f54-b889-d994d61ec14f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 877.622355] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] eadfea6c-3fce-4f54-b889-d994d61ec14f/eadfea6c-3fce-4f54-b889-d994d61ec14f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.622565] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.622802] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b13e2671-6058-4b4a-a15e-2a045d7dce61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.629647] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 877.629647] env[69475]: value = "task-3508276" [ 877.629647] env[69475]: _type = "Task" [ 877.629647] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.641449] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.641605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquired lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.641861] env[69475]: DEBUG nova.network.neutron [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 877.643079] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508276, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.902736] env[69475]: DEBUG oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508275, 'name': PowerOnVM_Task} progress is 79%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.127055] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 82236043-3222-4134-8717-4c239ed12aba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 878.139334] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508276, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067907} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.140306] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.141124] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-300cf6ff-4415-43b8-af57-4149c2755720 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.144305] env[69475]: DEBUG nova.compute.utils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Can not refresh info_cache because instance was not found {{(pid=69475) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 878.168021] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] eadfea6c-3fce-4f54-b889-d994d61ec14f/eadfea6c-3fce-4f54-b889-d994d61ec14f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.168418] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5001d48b-b72b-4392-bf77-3d6660ae5fd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.187314] env[69475]: DEBUG nova.network.neutron [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.191614] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 878.191614] env[69475]: value = "task-3508277" [ 878.191614] env[69475]: _type = "Task" [ 878.191614] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.201181] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508277, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.370012] env[69475]: DEBUG nova.network.neutron [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.392929] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.392929] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.392929] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 878.393257] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 878.393311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.397878] env[69475]: INFO nova.compute.manager [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Terminating instance [ 878.409546] env[69475]: DEBUG 
oslo_vmware.api [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508275, 'name': PowerOnVM_Task, 'duration_secs': 0.961859} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.409546] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 878.409546] env[69475]: INFO nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Took 9.67 seconds to spawn the instance on the hypervisor. [ 878.409767] env[69475]: DEBUG nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 878.410490] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5262a966-3f68-4933-9afd-c1eb736bdfc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.630041] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance a3ee83aa-f753-49e3-9db2-b1b67d6d211e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 878.702482] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508277, 'name': ReconfigVM_Task, 'duration_secs': 0.320214} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.702766] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Reconfigured VM instance instance-00000043 to attach disk [datastore1] eadfea6c-3fce-4f54-b889-d994d61ec14f/eadfea6c-3fce-4f54-b889-d994d61ec14f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 878.703417] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e313ea8-fa9a-433a-8c4b-98a91481991c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.709501] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 878.709501] env[69475]: value = "task-3508278" [ 878.709501] env[69475]: _type = "Task" [ 878.709501] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.717619] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508278, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.872140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Releasing lock "refresh_cache-5e3e57c5-8367-493f-8268-a0e496c8c878" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.872681] env[69475]: DEBUG nova.compute.manager [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.872921] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.873272] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bc9e6197-082f-4224-aaca-5a20c8c96fd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.882205] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5761c9-951c-41a5-96c2-25e0e87e0a68 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.916503] env[69475]: DEBUG nova.compute.manager [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 878.916749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 878.917141] env[69475]: WARNING nova.virt.vmwareapi.vmops [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e3e57c5-8367-493f-8268-a0e496c8c878 could not be found. [ 878.917318] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 878.917490] env[69475]: INFO nova.compute.manager [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 0.04 seconds to destroy the instance on the hypervisor. [ 878.917719] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 878.918509] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937bfa43-42b3-4187-aee6-5abf9ac0cf8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.921190] env[69475]: DEBUG nova.compute.manager [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 878.921291] env[69475]: DEBUG nova.network.neutron [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 878.931411] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 878.931873] env[69475]: INFO nova.compute.manager [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Took 53.16 seconds to build instance. [ 878.933117] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b37c78ab-6398-4360-bde3-0c712fd8217b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.939630] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 878.939630] env[69475]: value = "task-3508279" [ 878.939630] env[69475]: _type = "Task" [ 878.939630] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.943651] env[69475]: DEBUG nova.network.neutron [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 878.950436] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.133233] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b1b04eb9-ded6-4425-8a06-0c26c086a09b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 879.220345] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508278, 'name': Rename_Task, 'duration_secs': 0.137187} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.220657] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.220903] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fa053e6-0768-4a4c-9800-32255c2d576d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.227794] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 879.227794] env[69475]: value = "task-3508280" [ 879.227794] env[69475]: _type = "Task" [ 879.227794] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.236484] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508280, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.433791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-78ecd1ec-4a5d-4cd0-b5b5-8c9030f3d468 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 54.674s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.448698] env[69475]: DEBUG nova.network.neutron [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.449949] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508279, 'name': PowerOffVM_Task, 'duration_secs': 0.213572} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.450555] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.450796] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 879.451197] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-723f8793-e2f3-492a-8895-7cc8bced66dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.509141] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 879.509353] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 879.509537] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleting the datastore file [datastore1] b41845c6-46bd-4b3b-ab26-d7d2dad08f84 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 879.509804] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f9c7fee-e7c5-42e0-9ed2-c7bfad874d11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.516279] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for the task: (returnval){ [ 879.516279] env[69475]: value = "task-3508282" [ 879.516279] env[69475]: _type = "Task" [ 879.516279] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.524288] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508282, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.637190] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 0a65565c-c679-47e5-8606-832fe3876af6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 879.637190] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 879.637190] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 879.738067] env[69475]: DEBUG oslo_vmware.api [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508280, 'name': PowerOnVM_Task, 'duration_secs': 0.47135} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.740794] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.741064] env[69475]: INFO nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Took 7.43 seconds to spawn the instance on the hypervisor. [ 879.741274] env[69475]: DEBUG nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 879.742321] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77af07f-6e10-48e5-a813-ac54e887b86e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.951202] env[69475]: INFO nova.compute.manager [-] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Took 1.03 seconds to deallocate network for instance. 
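The repeated "Waiting for the task: (returnval){ value = "task-35082xx" ... } to complete" entries followed by "_poll_task ... progress is 0%" and "completed successfully" above are the oslo.vmware task-wait loop at work. A minimal sketch of that pattern follows; the vCenter endpoint, credentials and the managed-object reference are placeholders (assumptions), not values from this deployment, and this is not Nova's actual code.

    # Minimal sketch of the oslo.vmware task wait seen in the log entries above.
    # Endpoint, credentials and vm_ref are placeholders, not values from this log.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vcenter.example.invalid', 'user', 'secret',   # placeholder endpoint/creds
        api_retry_count=3, task_poll_interval=0.5)

    vm_ref = None  # placeholder: a VirtualMachine managed-object reference

    # invoke_api() returns the Task moref; wait_for_task() then polls the task
    # info, producing the "progress is 0%" / "completed successfully" DEBUG lines.
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    task_info = session.wait_for_task(task_ref)
    print(task_info.state, task_info.key)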
[ 879.977663] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.977925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.027849] env[69475]: DEBUG oslo_vmware.api [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Task: {'id': task-3508282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299331} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.028434] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.028674] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.028880] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.029009] env[69475]: INFO nova.compute.manager [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Took 1.11 seconds to destroy the instance on the hypervisor. [ 880.029281] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.031765] env[69475]: DEBUG nova.compute.manager [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.031867] env[69475]: DEBUG nova.network.neutron [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 880.094333] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb5a734-f256-4af2-9ce2-643d66a31588 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.102152] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85174cff-ed53-4dfa-a674-99cb371eb12b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.135865] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006a5b38-34b0-46c1-9fc5-b2568b3a71fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.145345] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e24f09e-50a1-4bbd-a965-00d58883921c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.162849] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.268513] env[69475]: INFO nova.compute.manager [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Took 50.51 seconds to build instance. 
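The "Acquiring lock ... by ...", "Lock ... acquired ... waited 0.000s" and "Lock ... \"released\" ... held 54.674s" triples above come from oslo.concurrency's lockutils wrappers (the inner() frames at lockutils.py:405/410/424). A rough sketch of the two usual usage patterns is below; the lock names and functions are illustrative placeholders, not Nova's actual code.

    # Illustrative sketch (not Nova's code) of the oslo.concurrency locking that
    # produces the "Acquiring lock" / "acquired" / "released" lines above.
    from oslo_concurrency import lockutils

    # Decorator form: serialises all callers sharing the same lock name and logs
    # how long each caller waited and how long the lock was held.
    @lockutils.synchronized('980bb0eb-121c-4703-a453-fb0b4351e9e3')
    def locked_build_and_run_instance():
        pass  # build/run work happens while the per-instance lock is held

    # Context-manager form, as used around short critical sections such as the
    # "compute_resources" lock in the resource tracker.
    with lockutils.lock('compute_resources'):
        pass  # claim or update resource usage here

    locked_build_and_run_instance()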
[ 880.364844] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.365075] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.425520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.425847] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.425973] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 880.426169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 880.426345] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.428462] env[69475]: INFO nova.compute.manager [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 
tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Terminating instance [ 880.457463] env[69475]: INFO nova.compute.manager [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance disappeared during terminate [ 880.457707] env[69475]: DEBUG oslo_concurrency.lockutils [None req-46f550e5-4a5f-4178-8455-ec310ba246ea tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "5e3e57c5-8367-493f-8268-a0e496c8c878" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.324s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.480744] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.666579] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.772582] env[69475]: DEBUG oslo_concurrency.lockutils [None req-17681924-248e-4738-b43d-eee6007cb301 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.024s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.867879] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 880.932802] env[69475]: DEBUG nova.compute.manager [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 880.933903] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 880.934100] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0513c0c8-8630-45d0-ae79-cd2308008c02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.944442] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 880.944711] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de8bbf13-d374-4004-bef4-50297ff15331 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.951032] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 880.951032] env[69475]: value = "task-3508283" [ 880.951032] env[69475]: _type = "Task" [ 880.951032] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.961055] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.003631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.103220] env[69475]: DEBUG nova.compute.manager [req-8cfe4c9a-26e7-48fc-8d95-fa5b47a717fc req-fca36eb8-9319-44ec-b31e-4361df9366a1 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Received event network-vif-deleted-5abe617e-d18a-416f-8c40-d0da33a563d2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 881.103439] env[69475]: INFO nova.compute.manager [req-8cfe4c9a-26e7-48fc-8d95-fa5b47a717fc req-fca36eb8-9319-44ec-b31e-4361df9366a1 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Neutron deleted interface 5abe617e-d18a-416f-8c40-d0da33a563d2; detaching it from the instance and deleting it from the info cache [ 881.103707] env[69475]: DEBUG nova.network.neutron [req-8cfe4c9a-26e7-48fc-8d95-fa5b47a717fc req-fca36eb8-9319-44ec-b31e-4361df9366a1 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.171859] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 881.172073] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.612s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.172402] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 43.223s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.173718] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.174999] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.498s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.176029] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.177278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 39.415s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.177458] env[69475]: DEBUG nova.objects.instance [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 881.184587] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 881.184587] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Cleaning up deleted instances {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 881.239964] env[69475]: INFO nova.scheduler.client.report [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance e960f967-d693-4ea8-9390-8b0232941c58 [ 881.242543] env[69475]: INFO nova.scheduler.client.report [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Deleted allocations for instance 8bea34ef-0caf-4cdb-a689-dd747d9b52ea [ 881.294097] env[69475]: DEBUG nova.network.neutron [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.400224] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.437032] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "4b17d080-594b-44e7-83aa-ebe0787722d9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.437135] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 
tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.460601] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508283, 'name': PowerOffVM_Task, 'duration_secs': 0.25729} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.460899] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 881.461122] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 881.461401] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5eb9cd4f-074e-47d6-a055-aa54e61819f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.522690] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 881.522964] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 881.523220] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleting the datastore file [datastore2] 980bb0eb-121c-4703-a453-fb0b4351e9e3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 881.523645] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8dde7538-e982-4723-9a86-af9d6c7466ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.531276] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for the task: (returnval){ [ 881.531276] env[69475]: value = "task-3508285" [ 
881.531276] env[69475]: _type = "Task" [ 881.531276] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.540341] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508285, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.607307] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bf799912-0b9f-4dd0-81e0-e4645619c995 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.616779] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58195bc-cee3-462e-9af3-c5368ee792dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.649259] env[69475]: DEBUG nova.compute.manager [req-8cfe4c9a-26e7-48fc-8d95-fa5b47a717fc req-fca36eb8-9319-44ec-b31e-4361df9366a1 service nova] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Detach interface failed, port_id=5abe617e-d18a-416f-8c40-d0da33a563d2, reason: Instance b41845c6-46bd-4b3b-ab26-d7d2dad08f84 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 881.697707] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] There are 43 instances to clean {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 881.698038] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 5e3e57c5-8367-493f-8268-a0e496c8c878] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 881.753674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ae86da91-ddb4-4a42-876e-ced64d859ff2 tempest-ServersTestBootFromVolume-730337904 tempest-ServersTestBootFromVolume-730337904-project-member] Lock "8bea34ef-0caf-4cdb-a689-dd747d9b52ea" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.679s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.754619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dcfe256-07ec-40d3-9eb8-2aa60f51207b tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "e960f967-d693-4ea8-9390-8b0232941c58" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.269s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.798844] env[69475]: INFO nova.compute.manager [-] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Took 1.76 seconds to deallocate network for instance. [ 881.939514] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 882.043216] env[69475]: DEBUG oslo_vmware.api [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Task: {'id': task-3508285, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.125846} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.043872] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 882.044208] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 882.044507] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 882.044868] env[69475]: INFO nova.compute.manager [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Took 1.11 seconds to destroy the instance on the hypervisor. [ 882.045228] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.045546] env[69475]: DEBUG nova.compute.manager [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 882.045749] env[69475]: DEBUG nova.network.neutron [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 882.202317] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e0cf0767-e3b5-4307-afec-ad4efdd783a0 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.025s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.204062] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 2e7066ca-162e-4465-a9c1-5422510e4c0f] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 882.206788] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.850s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.206858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.209063] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.661s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.210767] env[69475]: INFO nova.compute.claims [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.281242] env[69475]: INFO nova.scheduler.client.report [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted allocations for instance 2b0cc71c-862e-4eb0-afc4-b2125003b087 [ 882.305905] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.328059] env[69475]: DEBUG nova.compute.manager [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 882.328705] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab15347-61e2-486b-83da-68cf29856315 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.480225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.715730] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e7dd59b9-2a5f-403f-a2ce-78d2a9cd9d3d] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 882.790370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-70330e5e-57e4-41b0-aca0-fc4df5107c17 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "2b0cc71c-862e-4eb0-afc4-b2125003b087" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.441s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.841131] env[69475]: INFO nova.compute.manager [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] instance snapshotting [ 882.847028] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15796de8-f13f-47ca-8b11-b8e430267345 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.869129] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af321a36-86e5-4a52-8e13-90bc570a37c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.023465] env[69475]: DEBUG nova.network.neutron [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.221771] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 41c23568-c8d7-4d6c-8cc4-a94c95b3223a] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 884.055972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.055972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.055972] env[69475]: INFO nova.compute.manager [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Rebooting instance [ 884.058609] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 884.058609] env[69475]: INFO nova.compute.manager [-] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Took 2.01 seconds to deallocate network for instance. [ 884.058609] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 420ecc09-60c8-4a14-8504-d11d760ddbb4] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 884.060767] env[69475]: DEBUG nova.compute.manager [req-f750717c-c835-4b67-8a6d-a6ecc3dc2fd9 req-bb8102bf-81fe-4001-b05a-6650e0957f27 service nova] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Received event network-vif-deleted-2ba8c01f-b78c-4077-bb73-ff63d385807e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 884.062635] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2c0e0a73-6572-4c4e-8c00-b13d0c053dd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.074339] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 884.074339] env[69475]: value = "task-3508286" [ 884.074339] env[69475]: _type = "Task" [ 884.074339] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.087361] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508286, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.462363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8068a641-f3c2-4647-a03d-a4d4cd89f6fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.470185] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3365f4c3-c90e-45a1-a451-45b0275cc32a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.508024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11cf3f6-b47c-490b-843d-80b2a484876d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.515152] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be80a3e9-d080-49a8-a8d5-405caa2bdffc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.528110] env[69475]: DEBUG nova.compute.provider_tree [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 884.564697] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 3fba85c9-7798-4a66-b335-21f80962e0bd] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 884.570964] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.584112] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508286, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.589858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.590051] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.590415] env[69475]: DEBUG nova.network.neutron [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 884.824119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.824119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.824119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.824119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.824119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.826642] env[69475]: INFO 
nova.compute.manager [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Terminating instance [ 885.034209] env[69475]: DEBUG nova.scheduler.client.report [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.070251] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: a75d7a92-4ac7-4fa0-90f0-f0a0993e881e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 885.085930] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508286, 'name': CreateSnapshot_Task, 'duration_secs': 0.83164} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.086200] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 885.086901] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844764ff-d4bd-4ec6-a1a6-d83d52fd1576 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.333111] env[69475]: DEBUG nova.compute.manager [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 885.333347] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.334259] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92dd8543-ce06-45a3-a970-6a273631e853 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.342722] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.344911] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b22bbc81-5e1c-4848-842c-7850ed2f29a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.349123] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 885.349123] env[69475]: value = "task-3508287" [ 885.349123] env[69475]: _type = "Task" [ 885.349123] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.358970] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508287, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.485737] env[69475]: DEBUG nova.network.neutron [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.540021] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.330s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 885.540021] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 885.542724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.254s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 885.546539] env[69475]: INFO nova.compute.claims [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 885.573679] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: df73dd41-7455-4482-abb2-b61b26fcf403] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 885.606095] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 885.606473] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d374d4cd-382b-49b5-819f-8be437161978 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.615877] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 885.615877] env[69475]: value = "task-3508288" [ 885.615877] env[69475]: _type = "Task" [ 885.615877] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.627142] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508288, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.858907] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508287, 'name': PowerOffVM_Task, 'duration_secs': 0.273327} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.859357] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.859357] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.859599] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-648a0145-61ae-4340-975c-90a90e9b1dd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.926777] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.927036] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.927319] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore1] d1e5e08d-b41a-4655-997d-91fbd3581f00 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.927516] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54c4c9db-215d-4b17-920e-9fc60eb84f16 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.933725] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 885.933725] env[69475]: value = "task-3508290" [ 885.933725] env[69475]: _type = "Task" [ 885.933725] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.944817] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.989978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.051766] env[69475]: DEBUG nova.compute.utils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.055041] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.055041] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.076753] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: daef2117-0d9f-4c9e-99e7-1e8a65aa1f22] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 886.125579] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508288, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.189633] env[69475]: DEBUG nova.policy [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a123051be3624b50ab42a4254f687767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca5098b4aae94c08b3f8ffd66aae2e2c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.378711] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.378989] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.444026] env[69475]: DEBUG oslo_vmware.api [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136133} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.444557] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.444790] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 886.444978] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.445166] env[69475]: INFO nova.compute.manager [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Took 1.11 seconds to destroy the instance on the hypervisor. 
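The destroy sequence recorded above (PowerOffVM_Task, then UnregisterVM, then DeleteDatastoreFile_Task, each polled through wait_for_task until the task completes) can be reproduced outside Nova with oslo.vmware directly. The sketch below is a minimal illustration under that assumption, not the actual nova.virt.vmwareapi code path; the session credentials and the vm_ref, dc_ref and ds_path arguments are placeholders, not values taken from this log.

    # Minimal sketch (not the Nova code path): the same power-off /
    # unregister / datastore-delete sequence the log records, expressed
    # directly against oslo.vmware.  vm_ref, dc_ref and ds_path are
    # assumed inputs for this illustration.
    from oslo_vmware import api


    def destroy_vm(session, vm_ref, dc_ref, ds_path):
        """Power off, unregister and wipe a VM, polling each vCenter task."""
        # PowerOffVM_Task, polled the same way as task-3508287 above.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

        # UnregisterVM is a synchronous call, so there is no task to poll.
        session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

        # DeleteDatastoreFile_Task on the instance directory,
        # analogous to task-3508290 above.
        file_manager = session.vim.service_content.fileManager
        task = session.invoke_api(session.vim, 'DeleteDatastoreFile_Task',
                                  file_manager, name=ds_path,
                                  datacenter=dc_ref)
        session.wait_for_task(task)


    # Example wiring (placeholder credentials; a reachable vCenter is required):
    # session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)

Each *_Task invocation returns a Task managed object; wait_for_task blocks and re-polls it, which is what the repeated "_poll_task ... progress is 0%" / "completed successfully" entries above correspond to.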
[ 886.445455] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.445640] env[69475]: DEBUG nova.compute.manager [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.445749] env[69475]: DEBUG nova.network.neutron [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 886.500027] env[69475]: DEBUG nova.compute.manager [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 886.500349] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db3667d-c980-4258-8d7c-e10d73b63261 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.559047] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 886.583190] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 712e93b6-e797-4b9f-b39b-33373cede403] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 886.628494] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508288, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.847950] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Successfully created port: 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 886.882953] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 887.079666] env[69475]: DEBUG nova.compute.manager [req-410f4946-86bc-493a-a1e3-c4886f64b73f req-e9b9f0b0-f57f-42bc-9b2d-f24b19dc80a0 service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Received event network-vif-deleted-5c37f4d6-0e34-4637-ac7c-73daa6f83e42 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.079980] env[69475]: INFO nova.compute.manager [req-410f4946-86bc-493a-a1e3-c4886f64b73f req-e9b9f0b0-f57f-42bc-9b2d-f24b19dc80a0 service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Neutron deleted interface 5c37f4d6-0e34-4637-ac7c-73daa6f83e42; detaching it from the instance and deleting it from the info cache [ 887.080087] env[69475]: DEBUG nova.network.neutron [req-410f4946-86bc-493a-a1e3-c4886f64b73f req-e9b9f0b0-f57f-42bc-9b2d-f24b19dc80a0 service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.086791] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 3e332e28-5db5-4f04-8a47-95406da16e21] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 887.129012] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508288, 'name': CloneVM_Task, 'duration_secs': 1.503232} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.132475] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Created linked-clone VM from snapshot [ 887.133815] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeae968a-a345-4b82-be50-cd7c617a05b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.142810] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Uploading image 04a88c3d-f91d-41ae-b78d-8f3d116adc4c {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 887.146139] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798a726e-ef79-46cd-852d-72f482c70b4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.155742] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e1f840-a0e6-4f33-80d2-3ebe9f86eddf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.195473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb5fb380-10e1-4067-a541-7a5e6418a283 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.205427] 
env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff7dd9b-eea3-4c65-a50b-6f995118c09c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.212056] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 887.212056] env[69475]: value = "vm-701017" [ 887.212056] env[69475]: _type = "VirtualMachine" [ 887.212056] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 887.212288] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-afd73118-8f3d-4938-94e8-30c996daa440 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.223796] env[69475]: DEBUG nova.compute.provider_tree [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.228674] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease: (returnval){ [ 887.228674] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1e540-b6a2-1c31-959e-fd67b8442f1c" [ 887.228674] env[69475]: _type = "HttpNfcLease" [ 887.228674] env[69475]: } obtained for exporting VM: (result){ [ 887.228674] env[69475]: value = "vm-701017" [ 887.228674] env[69475]: _type = "VirtualMachine" [ 887.228674] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 887.228967] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the lease: (returnval){ [ 887.228967] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1e540-b6a2-1c31-959e-fd67b8442f1c" [ 887.228967] env[69475]: _type = "HttpNfcLease" [ 887.228967] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 887.235720] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.235720] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1e540-b6a2-1c31-959e-fd67b8442f1c" [ 887.235720] env[69475]: _type = "HttpNfcLease" [ 887.235720] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 887.439323] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.478255] env[69475]: DEBUG nova.network.neutron [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.521940] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0fc206e-0f7d-4e78-b9c0-34b87dce90fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.529970] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Doing hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 887.530270] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-6732fcdc-2415-4436-8164-ee2d6d9d3d83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.535986] env[69475]: DEBUG oslo_vmware.api [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 887.535986] env[69475]: value = "task-3508292" [ 887.535986] env[69475]: _type = "Task" [ 887.535986] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.544141] env[69475]: DEBUG oslo_vmware.api [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508292, 'name': ResetVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.575379] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 887.587909] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4294052-b948-46b4-9dad-bc265660841d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.593121] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46f4464-8d1f-48dc-a700-2dde9630784b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.605817] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b71882d4-537d-4a90-b43d-f8ac4ca0d90c] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 887.614099] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.614269] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 887.614348] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 887.614481] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 887.614632] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 887.614790] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:444}} [ 887.615426] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 887.615426] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 887.615426] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 887.615703] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 887.615703] env[69475]: DEBUG nova.virt.hardware [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 887.616525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71514a9d-c552-44cc-afa5-53610c573288 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.630927] env[69475]: DEBUG nova.compute.manager [req-410f4946-86bc-493a-a1e3-c4886f64b73f req-e9b9f0b0-f57f-42bc-9b2d-f24b19dc80a0 service nova] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Detach interface failed, port_id=5c37f4d6-0e34-4637-ac7c-73daa6f83e42, reason: Instance d1e5e08d-b41a-4655-997d-91fbd3581f00 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 887.636052] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f131d9-8e65-4fdc-bfee-d440f85d669a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.725920] env[69475]: DEBUG nova.scheduler.client.report [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.738665] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 887.738665] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1e540-b6a2-1c31-959e-fd67b8442f1c" [ 887.738665] env[69475]: _type = "HttpNfcLease" [ 887.738665] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 887.738928] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 887.738928] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1e540-b6a2-1c31-959e-fd67b8442f1c" [ 887.738928] env[69475]: _type = "HttpNfcLease" [ 887.738928] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 887.739727] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fab85c-0e13-4200-94cb-3a88c0675477 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.747496] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 887.748045] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk for reading. 
{{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 887.872625] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-8602aa68-1fa1-476b-90fe-35d62f2a3377 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.980763] env[69475]: INFO nova.compute.manager [-] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Took 1.53 seconds to deallocate network for instance. [ 888.045600] env[69475]: DEBUG oslo_vmware.api [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508292, 'name': ResetVM_Task, 'duration_secs': 0.152399} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.045878] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Did hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 888.046193] env[69475]: DEBUG nova.compute.manager [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 888.047024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e7069b-6b1a-4d1f-a33f-648a478b2274 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.109675] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 2dd98ffd-b0e6-4447-9c82-57713dc37abd] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 888.235724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.236169] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 888.240361] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.050s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.242059] env[69475]: INFO nova.compute.claims [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.492298] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.559685] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac5a263b-55ac-4807-b1a2-950279e3e885 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.504s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.614457] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 00ba5cd8-3516-4059-bcda-c2d01e165e07] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 888.748324] env[69475]: DEBUG nova.compute.utils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 888.752621] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 888.753056] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 888.859181] env[69475]: DEBUG nova.policy [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10edfc7213ac43b6a87eee0594e5bc22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7fb53bce6145da8fe1e2f8beb57807', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 889.119790] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: ed12921f-9be8-474d-958e-79dd16b8116e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 889.222569] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Successfully created port: 4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 889.262514] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 889.267414] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Successfully updated port: 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.308671] env[69475]: DEBUG nova.compute.manager [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Received event network-vif-plugged-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.309505] env[69475]: DEBUG oslo_concurrency.lockutils [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.309748] env[69475]: DEBUG oslo_concurrency.lockutils [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] Lock "82236043-3222-4134-8717-4c239ed12aba-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.309928] env[69475]: DEBUG oslo_concurrency.lockutils [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] Lock "82236043-3222-4134-8717-4c239ed12aba-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.310116] env[69475]: DEBUG nova.compute.manager [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] No waiting events found dispatching network-vif-plugged-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.310294] env[69475]: WARNING nova.compute.manager [req-75370f2a-7aa4-4e1f-9187-224e26e8ff9f req-078ba78d-037c-426c-9ee8-0f4b8095daa6 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Received unexpected event network-vif-plugged-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f for instance with vm_state building and task_state spawning. 
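The nova.virt.hardware entries above ("Flavor limits 0:0:0", "Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") show the driver enumerating guest CPU topologies for the one-vCPU m1.nano flavor. A simplified sketch of that enumeration follows; it is an illustration of the idea, not Nova's exact implementation, and the helper name possible_topologies is invented for this example.

    # Simplified illustration (not Nova's exact algorithm): enumerate
    # (sockets, cores, threads) combinations that exactly cover the
    # flavor's vCPU count within the given limits.
    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')


    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append(
                            VirtCPUTopology(sockets, cores, threads))
        return topologies


    print(possible_topologies(1, 65536, 65536, 65536))
    # -> [VirtCPUTopology(sockets=1, cores=1, threads=1)]

With vcpus=1 and the logged 65536/65536/65536 limits the only candidate is sockets=1, cores=1, threads=1, matching the "Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]" entries above.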
[ 889.629679] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 6f530b86-2ed1-41db-929c-8a5dd61d931a] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 889.732363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51ccbe70-87fb-4ff3-a293-c3c5d18bc7d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.740289] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b787e032-630f-4617-855f-7db51333b73c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.780289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.780289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.780289] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 889.780289] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-807976be-3295-4aec-a647-50709cce4eef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.793023] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d26e57-8203-4909-9d72-3d981a2219db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.806122] env[69475]: DEBUG nova.compute.provider_tree [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 890.133675] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8fbabf86-be9e-47ec-8c4c-adea4c68abe8] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 890.284842] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 890.309854] env[69475]: DEBUG nova.scheduler.client.report [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.416504] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 890.416807] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 890.417012] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 890.417231] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 890.417383] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 890.417544] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 890.417761] env[69475]: DEBUG nova.virt.hardware [None 
req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 890.417920] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 890.418104] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 890.418271] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 890.418445] env[69475]: DEBUG nova.virt.hardware [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 890.419717] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3ebdaf-c07d-48cb-b6e4-6dcb7fc7e12c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.422838] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.430413] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-530888f8-a88a-4186-8f2d-2dd5667ab29f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.572010] env[69475]: DEBUG nova.network.neutron [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.638824] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 4c2e12bf-3f16-47de-a604-44b62a6c7137] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 890.660449] env[69475]: DEBUG nova.compute.manager [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Received event network-vif-plugged-4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 890.660807] env[69475]: DEBUG oslo_concurrency.lockutils [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] Acquiring lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.661150] env[69475]: DEBUG oslo_concurrency.lockutils [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.661494] env[69475]: DEBUG oslo_concurrency.lockutils [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] Lock 
"a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.661846] env[69475]: DEBUG nova.compute.manager [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] No waiting events found dispatching network-vif-plugged-4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 890.662190] env[69475]: WARNING nova.compute.manager [req-0195676e-5bd9-4e9b-9a0a-9f5d9ce39e77 req-2fdc20b5-94ed-4c78-9433-35a92fa07640 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Received unexpected event network-vif-plugged-4246ca40-af00-4315-b24a-c4e3217dfdb2 for instance with vm_state building and task_state spawning. [ 890.739226] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Successfully updated port: 4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 890.817380] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.575s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.817380] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 890.819498] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.879s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.819793] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.822374] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.263s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.823949] env[69475]: INFO nova.compute.claims [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 890.877462] env[69475]: INFO nova.scheduler.client.report [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Deleted allocations for instance 56f0e59a-1c37-4977-81dc-da1a274ce7e7 [ 891.074243] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.074590] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Instance network_info: |[{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", 
"ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 891.075030] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:cb:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91ad3911-8ea3-4bb6-bcf5-fd800e27e57f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 891.082714] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating folder: Project (ca5098b4aae94c08b3f8ffd66aae2e2c). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.083007] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71a152e3-6b8c-4531-b1a5-74a06b69bcbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.093368] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created folder: Project (ca5098b4aae94c08b3f8ffd66aae2e2c) in parent group-v700823. [ 891.093552] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating folder: Instances. Parent ref: group-v701018. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 891.093767] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1235e4fd-4239-4980-9274-30c0ef2c1b4d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.102163] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created folder: Instances in parent group-v701018. [ 891.102452] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 891.102672] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 891.102886] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d465041c-2b56-4614-bc4a-5f98e27adda2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.121471] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 891.121471] env[69475]: value = "task-3508295" [ 891.121471] env[69475]: _type = "Task" [ 891.121471] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.128754] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508295, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.142288] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e48e2cc1-7d60-457f-8f1c-649f0dda8cdb] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 891.242285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.242285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.242285] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.329032] env[69475]: DEBUG nova.compute.utils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 891.333820] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 891.333820] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 891.367398] env[69475]: DEBUG nova.compute.manager [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Received event network-changed-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 891.367398] env[69475]: DEBUG nova.compute.manager [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Refreshing instance network info cache due to event network-changed-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 891.367708] env[69475]: DEBUG oslo_concurrency.lockutils [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.367708] env[69475]: DEBUG oslo_concurrency.lockutils [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.367899] env[69475]: DEBUG nova.network.neutron [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Refreshing network info cache for port 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 891.386487] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2f325eba-95a4-456a-8508-b176b771ce0c tempest-ServerGroupTestJSON-1312532339 tempest-ServerGroupTestJSON-1312532339-project-member] Lock "56f0e59a-1c37-4977-81dc-da1a274ce7e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.943s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.420063] env[69475]: DEBUG nova.policy [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2cadefdf967f4ef1b0c24f7bb0b7d6d3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dde7ecd407ae48f6a5d1b791df065d6e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 891.632628] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508295, 'name': CreateVM_Task, 'duration_secs': 0.389284} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.632808] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.633546] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.633694] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.634014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 891.634984] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1608e2ec-6bb4-4224-b822-4c4ce419bc73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.639319] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 891.639319] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270fadc-ddb5-be03-5d55-e47e3adeb5a4" [ 891.639319] env[69475]: _type = "Task" [ 891.639319] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.648515] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 9e2d4d61-71ed-447a-b28e-c29c5bd8d763] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 891.650492] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270fadc-ddb5-be03-5d55-e47e3adeb5a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.727452] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Successfully created port: f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.834062] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 891.856241] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 892.153459] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: fa2ca135-3cd2-411e-b1fc-35b93a97e75d] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.160038] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270fadc-ddb5-be03-5d55-e47e3adeb5a4, 'name': SearchDatastore_Task, 'duration_secs': 0.023399} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.160865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.161024] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.162344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.162344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.162344] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.162344] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a63067f0-42e8-41a2-bc98-d77e71ccca44 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.166525] env[69475]: DEBUG nova.network.neutron [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updated VIF entry in instance network info cache for port 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 892.166921] env[69475]: DEBUG nova.network.neutron [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.175221] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.175434] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.176209] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c736fb02-ef50-4aac-9fc7-44a914639953 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.182637] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 892.182637] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524634cf-a2c5-3864-29f5-cb29f4e3f244" [ 892.182637] env[69475]: _type = "Task" [ 892.182637] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.191193] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524634cf-a2c5-3864-29f5-cb29f4e3f244, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.195360] env[69475]: DEBUG nova.network.neutron [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updating instance_info_cache with network_info: [{"id": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "address": "fa:16:3e:0b:b8:24", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4246ca40-af", "ovs_interfaceid": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.316544] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc35bf2-650f-4882-b08f-b5585fb763a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.324063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04d79618-4988-4044-9834-3e8863bc119c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.357365] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e906a7-e79a-44ef-a74a-3946019535d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.365362] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786308d2-713f-4155-9ef2-cb1462900281 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.380750] env[69475]: DEBUG nova.compute.provider_tree [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.662071] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b87cac84-ea70-428b-872e-4f6145e36b39] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 892.670585] env[69475]: DEBUG oslo_concurrency.lockutils [req-ae39d8b8-8ee4-4c6c-9c57-78c11fb4655f req-ffa1be71-7e22-460e-a2e2-e5482cb73ce5 service nova] Releasing lock 
"refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.697374] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524634cf-a2c5-3864-29f5-cb29f4e3f244, 'name': SearchDatastore_Task, 'duration_secs': 0.024682} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.697836] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.698148] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Instance network_info: |[{"id": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "address": "fa:16:3e:0b:b8:24", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4246ca40-af", "ovs_interfaceid": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.699101] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0b:b8:24', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '457c42cd-4ddb-4374-923e-d419b7f6eaff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4246ca40-af00-4315-b24a-c4e3217dfdb2', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.708271] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.708271] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef5dc925-3c7d-4693-be1f-86eb957844b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.711593] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.712799] env[69475]: DEBUG nova.compute.manager [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Received event network-changed-4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 892.712975] env[69475]: DEBUG nova.compute.manager [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Refreshing instance network info cache due to event network-changed-4246ca40-af00-4315-b24a-c4e3217dfdb2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 892.713195] env[69475]: DEBUG oslo_concurrency.lockutils [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] Acquiring lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.713329] env[69475]: DEBUG oslo_concurrency.lockutils [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] Acquired lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.713520] env[69475]: DEBUG nova.network.neutron [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Refreshing network info cache for port 4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 892.717042] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba29e69a-e467-4f03-b3c4-f075bdfb27d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.735231] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 892.735231] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52714962-5ca7-ef16-8209-e05463b3bfcb" [ 892.735231] env[69475]: _type = "Task" [ 892.735231] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.744352] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52714962-5ca7-ef16-8209-e05463b3bfcb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.790190] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.790190] env[69475]: value = "task-3508296" [ 892.790190] env[69475]: _type = "Task" [ 892.790190] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.800820] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508296, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.861649] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 892.884418] env[69475]: DEBUG nova.scheduler.client.report [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.904870] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:39:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2d6dab00-d5b5-4904-b4d4-4a46ef0cf4b2',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2018768159',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.905310] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 892.906200] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 892.906200] env[69475]: DEBUG nova.virt.hardware [None 
req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 892.906200] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 892.906200] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 892.906568] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 892.907083] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 892.907083] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 892.907247] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 892.907297] env[69475]: DEBUG nova.virt.hardware [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 892.908953] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081d8c66-4b36-4249-a622-8ed363c24f25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.922044] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8d49861-203f-4bd6-accc-4dc224549da8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.137533] env[69475]: DEBUG nova.network.neutron [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updated VIF entry in instance network info cache for port 4246ca40-af00-4315-b24a-c4e3217dfdb2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 893.137612] env[69475]: DEBUG nova.network.neutron [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updating instance_info_cache with network_info: [{"id": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "address": "fa:16:3e:0b:b8:24", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4246ca40-af", "ovs_interfaceid": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.165088] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 3149cd80-503c-42e4-ac91-54aababe84e3] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 893.246443] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52714962-5ca7-ef16-8209-e05463b3bfcb, 'name': SearchDatastore_Task, 'duration_secs': 0.012778} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.246805] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.247145] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 893.247756] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c44d243e-3c32-45c3-8285-2c164dc02744 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.255153] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 893.255153] env[69475]: value = "task-3508297" [ 893.255153] env[69475]: _type = "Task" [ 893.255153] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.272336] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.302234] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508296, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.390325] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.390818] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 893.393540] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.763s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.393832] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.398230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.474s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.398230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.398479] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.354s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.398653] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.400416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.345s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.400479] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.405930] env[69475]: DEBUG oslo_concurrency.lockutils 
[None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.312s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.405930] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.405930] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 17.702s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.410572] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Successfully updated port: f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.441602] env[69475]: INFO nova.scheduler.client.report [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Deleted allocations for instance 7be48799-ea4a-4e7f-95c2-637460596cfc [ 893.447513] env[69475]: INFO nova.scheduler.client.report [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Deleted allocations for instance 93607154-f135-4925-9c3a-a97051535b00 [ 893.468755] env[69475]: INFO nova.scheduler.client.report [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Deleted allocations for instance 619a87e7-097c-41af-8452-5437b82e7ebe [ 893.491426] env[69475]: INFO nova.scheduler.client.report [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Deleted allocations for instance 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9 [ 893.508413] env[69475]: INFO nova.scheduler.client.report [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Deleted allocations for instance 2ade2ed6-4725-4913-8ac4-14a96ced3e4b [ 893.641128] env[69475]: DEBUG oslo_concurrency.lockutils [req-b8469b28-e46d-4a5d-8057-f27f96295975 req-97d3b45a-596c-4d59-980c-3589dadaeb73 service nova] Releasing lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.669795] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8f65d893-d2e2-452f-8870-f72ec036f16a] Instance has had 0 of 5 
cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 893.770094] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508297, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.804488] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508296, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.907837] env[69475]: DEBUG nova.compute.utils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 893.911833] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 893.911833] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 893.919190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.919190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.919190] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 893.961363] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33ec3a40-2393-48da-b133-eaad1022c0de tempest-VolumesAdminNegativeTest-1855579597 tempest-VolumesAdminNegativeTest-1855579597-project-member] Lock "93607154-f135-4925-9c3a-a97051535b00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.011s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.962605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-069ab74c-68cd-44db-b5ad-9075a3c26e97 tempest-ServersV294TestFqdnHostnames-1767580808 tempest-ServersV294TestFqdnHostnames-1767580808-project-member] Lock "7be48799-ea4a-4e7f-95c2-637460596cfc" "released" 
by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.756s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.981700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fe11af27-f3db-4182-81f2-3cfc2a48d787 tempest-ServerMetadataTestJSON-1277105959 tempest-ServerMetadataTestJSON-1277105959-project-member] Lock "619a87e7-097c-41af-8452-5437b82e7ebe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.139s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.986821] env[69475]: DEBUG nova.policy [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f28bd2be7e614a278dcc996d469ba7be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '776ae12e87e9437b8144fa2be4bc3e9f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 893.999971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-33a965da-f2f3-405c-b9d5-847435eb3879 tempest-ListImageFiltersTestJSON-282139983 tempest-ListImageFiltersTestJSON-282139983-project-member] Lock "7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.388s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.021383] env[69475]: DEBUG oslo_concurrency.lockutils [None req-95bd29f4-34a6-4433-b2fc-f08013368db7 tempest-ServersTestMultiNic-273377454 tempest-ServersTestMultiNic-273377454-project-member] Lock "2ade2ed6-4725-4913-8ac4-14a96ced3e4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.059s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.174359] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: c078753c-48a6-490b-8d7d-b0832eced25e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 894.274613] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508297, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.309440] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508296, 'name': CreateVM_Task, 'duration_secs': 1.46602} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.309620] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 894.310340] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.310509] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.310843] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 894.311157] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6eec3d6-e3d2-40f2-8fa1-9edd8d98f92b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.316927] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 894.316927] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298f875-4d47-1155-6404-ed193dc2494c" [ 894.316927] env[69475]: _type = "Task" [ 894.316927] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.330836] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298f875-4d47-1155-6404-ed193dc2494c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.382523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c404842-8953-460e-b4c7-d0ffc9061fef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.391197] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e60094-ae82-4672-9a3a-bd98dbae823a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.425388] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 894.430393] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67c1dc3-6d2d-4838-b30d-9ad135065ef4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.446027] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02810c40-d3dd-4f9f-908d-965a183328b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.463356] env[69475]: DEBUG nova.compute.provider_tree [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.663181] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Successfully created port: 08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 894.678409] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 3eda17da-111c-412d-9af4-d3a40b7d8faa] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 894.770600] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508297, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.404634} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.770861] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 894.771098] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 894.771664] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a52239d-e15c-437d-80d8-08185df21165 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.778690] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 894.778690] env[69475]: value = "task-3508298" [ 894.778690] env[69475]: _type = "Task" [ 894.778690] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.788865] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.837020] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5298f875-4d47-1155-6404-ed193dc2494c, 'name': SearchDatastore_Task, 'duration_secs': 0.068744} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.837020] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 894.837287] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 894.837681] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 894.838101] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 894.838308] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 894.838532] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 894.838949] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec37ff8f-8731-4279-b1f1-8fd7c6a34105 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.849868] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 894.850079] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 894.851028] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-705c9c93-bc7d-4a83-b871-309b05496326 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.856187] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 894.856187] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dcd6b-3f59-befd-9133-fb75bcf8b6d0" [ 894.856187] env[69475]: _type = "Task" [ 894.856187] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.866787] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dcd6b-3f59-befd-9133-fb75bcf8b6d0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.966860] env[69475]: DEBUG nova.scheduler.client.report [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 895.124964] env[69475]: DEBUG nova.compute.manager [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Received event network-vif-plugged-f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.124964] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Acquiring lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.124964] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.124964] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.124964] env[69475]: DEBUG nova.compute.manager [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] No waiting events found dispatching network-vif-plugged-f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 895.124964] env[69475]: WARNING nova.compute.manager [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Received unexpected event network-vif-plugged-f9a10762-ba87-425f-9623-1ffdf22c5bb4 for instance with vm_state building and task_state spawning. [ 895.124964] env[69475]: DEBUG nova.compute.manager [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Received event network-changed-f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.124964] env[69475]: DEBUG nova.compute.manager [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Refreshing instance network info cache due to event network-changed-f9a10762-ba87-425f-9623-1ffdf22c5bb4. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 895.124964] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.183152] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 77a5665d-b00f-42c2-a1e8-319dfd232b06] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 895.192270] env[69475]: DEBUG nova.network.neutron [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.289993] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105846} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.290527] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 895.291346] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b959a71a-2a05-4e16-ac6f-bd27e763423d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.321428] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 895.321699] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b0d0706-94fe-4a43-aa96-023126a94a15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.350983] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 895.350983] env[69475]: value = "task-3508299" [ 895.350983] env[69475]: _type = "Task" [ 895.350983] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.364404] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508299, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.372210] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dcd6b-3f59-befd-9133-fb75bcf8b6d0, 'name': SearchDatastore_Task, 'duration_secs': 0.009959} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.372210] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd59316a-bf0c-4d2f-ab72-eda695627a98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.378164] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 895.378164] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5284c4a6-1297-9483-a297-f1fe4c6308e9" [ 895.378164] env[69475]: _type = "Task" [ 895.378164] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.386768] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5284c4a6-1297-9483-a297-f1fe4c6308e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.436742] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 895.469560] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 895.469793] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 895.469999] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 895.470133] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 
tempest-ServerTagsTestJSON-1683260267-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 895.470275] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 895.470450] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 895.470736] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 895.470984] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 895.471195] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 895.472255] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 895.472255] env[69475]: DEBUG nova.virt.hardware [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 895.477799] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3260dd-0283-4fcb-9298-c835e7a32134 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.487269] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c1c60d-8ff7-4cf3-babb-579217ca4b00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.686618] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 3c253a57-1c93-4e8d-aaa1-1331c0547d85] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 895.696512] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 
tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.699190] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Instance network_info: |[{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 895.699556] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 895.699844] env[69475]: DEBUG nova.network.neutron [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Refreshing network info cache for port f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 895.701479] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:ce:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9a10762-ba87-425f-9623-1ffdf22c5bb4', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 895.715922] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 895.717257] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 895.717492] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4022c2bf-1fbf-4b86-ba25-a0fba9df0587 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.739724] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 895.739724] env[69475]: value = "task-3508300" [ 895.739724] env[69475]: _type = "Task" [ 895.739724] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.749962] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508300, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.863915] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508299, 'name': ReconfigVM_Task, 'duration_secs': 0.300517} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.864326] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 895.865022] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28a0f52a-6643-42ce-89fb-8fe0b7613cfb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.871814] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 895.871814] env[69475]: value = "task-3508301" [ 895.871814] env[69475]: _type = "Task" [ 895.871814] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.884745] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508301, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.899735] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5284c4a6-1297-9483-a297-f1fe4c6308e9, 'name': SearchDatastore_Task, 'duration_secs': 0.011375} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.899735] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 895.899735] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/a3ee83aa-f753-49e3-9db2-b1b67d6d211e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 895.899735] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f6f7375-055b-4011-a73b-73484f929c86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.909189] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 895.909189] env[69475]: value = "task-3508302" [ 895.909189] env[69475]: _type = "Task" [ 895.909189] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.918316] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508302, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.983928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.579s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.987196] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.984s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.989758] env[69475]: INFO nova.compute.claims [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 896.192724] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: d1a316d5-59ef-4286-9d7e-a444ffadc49d] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 896.252747] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508300, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.383947] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508301, 'name': Rename_Task, 'duration_secs': 0.149266} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.384281] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 896.384557] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9e07ce81-d35b-41d0-bf24-a2e7929dea6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.391491] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 896.391491] env[69475]: value = "task-3508303" [ 896.391491] env[69475]: _type = "Task" [ 896.391491] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.401178] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508303, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.420274] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508302, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.596913] env[69475]: INFO nova.scheduler.client.report [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocation for migration 7e88bc9f-10f3-40da-8081-c14e8c051ac4 [ 896.699118] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: c3db35f4-f43d-464c-9556-18a90866ee6a] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 896.753551] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508300, 'name': CreateVM_Task, 'duration_secs': 0.628756} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.753731] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 896.755506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.755696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.756095] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 896.756371] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d5fcc62-b43c-44dc-82e8-584ed9253f38 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.768341] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 896.768341] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520c107c-6221-2172-206c-24665fbd4189" [ 896.768341] env[69475]: _type = "Task" [ 896.768341] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.779718] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520c107c-6221-2172-206c-24665fbd4189, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.782400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 896.782400] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 896.782400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 896.782400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 896.782400] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.782400] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20806965-84bc-4a05-a677-62a2fde683da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.789634] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.789820] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 896.790637] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0d60edd-e511-4c08-ba63-070f36cdde97 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.796623] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 896.796623] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5281d6e3-9b22-8335-7032-9e20a9117508" [ 896.796623] env[69475]: _type = "Task" [ 896.796623] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.805253] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5281d6e3-9b22-8335-7032-9e20a9117508, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.870122] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 896.870268] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72af02fc-34bc-4975-9587-538527a3fa9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.876665] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 896.876787] env[69475]: ERROR oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk due to incomplete transfer. [ 896.877033] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-335982e4-d89b-4988-a14b-ba6555c94854 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.885399] env[69475]: DEBUG oslo_vmware.rw_handles [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5247d9c3-acac-4bb4-8ea8-af1e5fbb676f/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 896.885603] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Uploaded image 04a88c3d-f91d-41ae-b78d-8f3d116adc4c to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 896.887791] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 896.888065] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0fdcae55-57a6-4029-9002-146bc90dbe1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.898723] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 896.898723] env[69475]: value = "task-3508304" [ 896.898723] env[69475]: _type = "Task" [ 896.898723] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.898723] env[69475]: DEBUG nova.network.neutron [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updated VIF entry in instance network info cache for port f9a10762-ba87-425f-9623-1ffdf22c5bb4. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 896.898926] env[69475]: DEBUG nova.network.neutron [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 896.909494] env[69475]: DEBUG oslo_vmware.api [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508303, 'name': PowerOnVM_Task, 'duration_secs': 0.46875} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.910830] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.911078] env[69475]: INFO nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 9.34 seconds to spawn the instance on the hypervisor. [ 896.911308] env[69475]: DEBUG nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 896.915994] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c416f337-5ee0-4b5a-bc0b-e07b1b7f6b0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.923237] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508304, 'name': Destroy_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.929565] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508302, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.582174} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.931601] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/a3ee83aa-f753-49e3-9db2-b1b67d6d211e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 896.931601] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 896.934756] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d745abdb-7fc1-4e8d-bfc0-b77c70d18aa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.942675] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 896.942675] env[69475]: value = "task-3508305" [ 896.942675] env[69475]: _type = "Task" [ 896.942675] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.952364] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508305, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.104506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-22e5bdf5-bfcb-49a4-a0d9-07e4c590454b tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 24.789s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.208149] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: a22a4d65-56eb-4313-bd0e-81148981f5b8] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.307566] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5281d6e3-9b22-8335-7032-9e20a9117508, 'name': SearchDatastore_Task, 'duration_secs': 0.009369} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.308356] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bca3501e-4c05-41a8-aaff-3ab8da68ca88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.317093] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 897.317093] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d92bb5-f418-472a-4a10-f5731a81772e" [ 897.317093] env[69475]: _type = "Task" [ 897.317093] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.329346] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d92bb5-f418-472a-4a10-f5731a81772e, 'name': SearchDatastore_Task, 'duration_secs': 0.00995} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.329649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.329864] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 897.330348] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88851ffd-5347-4d71-800a-b2d8312a5d79 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.334977] env[69475]: DEBUG nova.compute.manager [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Received event network-vif-plugged-08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 897.335710] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] Acquiring lock "0a65565c-c679-47e5-8606-832fe3876af6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.335710] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] Lock "0a65565c-c679-47e5-8606-832fe3876af6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.335710] env[69475]: DEBUG oslo_concurrency.lockutils [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] Lock "0a65565c-c679-47e5-8606-832fe3876af6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.335710] env[69475]: DEBUG nova.compute.manager [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] No waiting events found dispatching network-vif-plugged-08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 897.335710] env[69475]: WARNING nova.compute.manager [req-ea9da717-34d7-4e8d-81fc-fd1707fb1ddd req-f1c910e7-fa96-427c-bc8e-e4711f82555f service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Received unexpected event 
network-vif-plugged-08bb22ea-bdd1-4469-b276-1932d3bd682f for instance with vm_state building and task_state spawning. [ 897.337957] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 897.337957] env[69475]: value = "task-3508306" [ 897.337957] env[69475]: _type = "Task" [ 897.337957] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.350129] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508306, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.410316] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508304, 'name': Destroy_Task, 'duration_secs': 0.33143} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.410316] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab24795d-3dae-4532-8bc8-218200b0f099 req-226d681b-e78e-4952-848e-5bbda9a9909a service nova] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 897.410316] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Destroyed the VM [ 897.410574] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 897.414423] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-46b5b836-1465-43ab-91eb-e6dd6c490a71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.423834] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 897.423834] env[69475]: value = "task-3508307" [ 897.423834] env[69475]: _type = "Task" [ 897.423834] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.432586] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508307, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.447773] env[69475]: INFO nova.compute.manager [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 49.92 seconds to build instance. [ 897.460711] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508305, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07021} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.460711] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 897.460846] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9a9e2d-240c-4a58-a8bc-d3f212e1efe6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.507296] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/a3ee83aa-f753-49e3-9db2-b1b67d6d211e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.509442] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eff4a371-d5ac-4c20-a5e0-925f0cb329a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.527984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02331f9d-0f8f-4694-aa4d-e9f27affc2d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.544559] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a7432f-ed76-46b5-a054-c35fb1c2ec83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.549047] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 897.549047] env[69475]: value = "task-3508308" [ 897.549047] env[69475]: _type = "Task" [ 897.549047] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.584976] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Successfully updated port: 08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 897.587114] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ab45a94-3b81-40cf-97b3-2c39d27a2de0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.594215] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508308, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.603381] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6310c086-0fa2-4dff-9abb-cb0c50869ded {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.624780] env[69475]: DEBUG nova.compute.provider_tree [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 897.710771] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 7516b200-60b8-4cf2-aa0d-2ebc30c4b3c3] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 897.852519] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508306, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51054} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.852887] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.853159] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.853432] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de9b836b-c527-4252-a1f3-fc7cc39f61e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.860336] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 897.860336] env[69475]: value = "task-3508309" [ 897.860336] env[69475]: _type = "Task" [ 897.860336] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.869380] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508309, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.934223] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508307, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.954040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ac7b1dc3-3469-478b-99fa-324d86dd33a3 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.436s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.059750] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508308, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.091321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.091321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquired lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 898.091321] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 898.130250] env[69475]: DEBUG nova.scheduler.client.report [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 898.214365] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: af5dc581-cf6a-4b84-8bcf-96606ae07cc1] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.373844] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508309, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077677} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.373844] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.374150] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdded51-5356-44c2-8d07-a66a706b7eee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.398438] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.398438] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2cd53ef9-ce32-4874-994e-4557cc56391d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.421578] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 898.421578] env[69475]: value = "task-3508310" [ 898.421578] env[69475]: _type = "Task" [ 898.421578] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.439516] env[69475]: DEBUG oslo_vmware.api [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508307, 'name': RemoveSnapshot_Task, 'duration_secs': 0.847286} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.442019] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508310, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.442019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 898.442019] env[69475]: INFO nova.compute.manager [None req-d2c385a6-040a-4c8d-a302-9c15c54eb647 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Took 15.60 seconds to snapshot the instance on the hypervisor. 
[ 898.566949] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508308, 'name': ReconfigVM_Task, 'duration_secs': 0.590652} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.567075] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/a3ee83aa-f753-49e3-9db2-b1b67d6d211e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.567737] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f91eb6ae-264b-40bd-b5c1-fcb3fa61a6de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.578097] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 898.578097] env[69475]: value = "task-3508311" [ 898.578097] env[69475]: _type = "Task" [ 898.578097] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.586101] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508311, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.640743] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.641295] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 898.648538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.248s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.654948] env[69475]: INFO nova.compute.claims [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 898.665465] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 898.719186] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 67287947-ecce-4462-8268-23bcc7421766] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 898.936843] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508310, 'name': ReconfigVM_Task, 'duration_secs': 0.288691} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.940335] env[69475]: DEBUG nova.network.neutron [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Updating instance_info_cache with network_info: [{"id": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "address": "fa:16:3e:8e:e2:e3", "network": {"id": "650ae7e4-7543-473c-abd7-c16a87fbd693", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1484136106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "776ae12e87e9437b8144fa2be4bc3e9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bb22ea-bd", "ovs_interfaceid": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.942513] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a 
tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfigured VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 898.942759] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-64fcf69d-56cd-4d48-92a5-6d3bba47fad0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.955431] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 898.955431] env[69475]: value = "task-3508312" [ 898.955431] env[69475]: _type = "Task" [ 898.955431] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.966409] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508312, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.994615] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.994615] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.086784] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508311, 'name': Rename_Task, 'duration_secs': 0.140134} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.086784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.086949] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b060f709-f835-4078-b259-a5a98aaa2bc8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.094201] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 899.094201] env[69475]: value = "task-3508313" [ 899.094201] env[69475]: _type = "Task" [ 899.094201] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.103346] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508313, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.164954] env[69475]: DEBUG nova.compute.utils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 899.169720] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 899.169720] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 899.222187] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b255f4d7-b177-4d6c-8a28-dcb5a179c1c0] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 899.318832] env[69475]: DEBUG nova.policy [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc7e62ede743400197923eebd7318481', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72b480b7835d47a18d77bfe4a983f017', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 899.375301] env[69475]: DEBUG nova.compute.manager [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Received event network-changed-08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.375494] env[69475]: DEBUG nova.compute.manager [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Refreshing instance network info cache due to event network-changed-08bb22ea-bdd1-4469-b276-1932d3bd682f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 899.375682] env[69475]: DEBUG oslo_concurrency.lockutils [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] Acquiring lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.443930] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Releasing lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.444505] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Instance network_info: |[{"id": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "address": "fa:16:3e:8e:e2:e3", "network": {"id": "650ae7e4-7543-473c-abd7-c16a87fbd693", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1484136106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "776ae12e87e9437b8144fa2be4bc3e9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bb22ea-bd", "ovs_interfaceid": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 899.445195] env[69475]: DEBUG oslo_concurrency.lockutils [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] Acquired lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.445719] env[69475]: DEBUG nova.network.neutron [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Refreshing network info cache for port 08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 899.451334] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8e:e2:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dcf5c3f7-4e33-4f21-b323-3673930b789c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '08bb22ea-bdd1-4469-b276-1932d3bd682f', 'vif_model': 
'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 899.460704] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Creating folder: Project (776ae12e87e9437b8144fa2be4bc3e9f). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 899.466497] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02a8348d-0e17-4d46-ad1a-1ab60e46ac39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.487500] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508312, 'name': Rename_Task, 'duration_secs': 0.159734} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.490016] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.491880] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Created folder: Project (776ae12e87e9437b8144fa2be4bc3e9f) in parent group-v700823. [ 899.492331] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Creating folder: Instances. Parent ref: group-v701023. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 899.492669] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-df1cf764-7263-4b3e-8b0a-07c6fec1b2df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.494904] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6f2dcedd-8f23-4ee2-837b-9270f698aa7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.497743] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.511179] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 899.511179] env[69475]: value = "task-3508315" [ 899.511179] env[69475]: _type = "Task" [ 899.511179] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.517093] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Created folder: Instances in parent group-v701023. [ 899.517437] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.518092] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 899.518341] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-da068661-fd6d-429a-8fc3-c547c9da8d84 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.543665] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.551216] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 899.551216] env[69475]: value = "task-3508317" [ 899.551216] env[69475]: _type = "Task" [ 899.551216] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.562057] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508317, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.609526] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508313, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.672034] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 899.730079] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 25c44ae0-4193-4833-85ec-ebc0ef3cf593] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.024096] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508315, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.039203] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.042587] env[69475]: DEBUG nova.network.neutron [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Updated VIF entry in instance network info cache for port 08bb22ea-bdd1-4469-b276-1932d3bd682f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 900.043019] env[69475]: DEBUG nova.network.neutron [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Updating instance_info_cache with network_info: [{"id": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "address": "fa:16:3e:8e:e2:e3", "network": {"id": "650ae7e4-7543-473c-abd7-c16a87fbd693", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1484136106-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "776ae12e87e9437b8144fa2be4bc3e9f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dcf5c3f7-4e33-4f21-b323-3673930b789c", "external-id": "nsx-vlan-transportzone-983", "segmentation_id": 983, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap08bb22ea-bd", "ovs_interfaceid": "08bb22ea-bdd1-4469-b276-1932d3bd682f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.062343] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508317, 'name': CreateVM_Task, 'duration_secs': 0.371204} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.063209] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.063945] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.064128] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.064436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.067446] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51a9887d-a9ef-4b27-9d21-25f2dc611eea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.073743] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 900.073743] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf711-e7e0-a3cb-d97e-a53e2102b26f" [ 900.073743] env[69475]: _type = "Task" [ 900.073743] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.081914] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf711-e7e0-a3cb-d97e-a53e2102b26f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.106085] env[69475]: DEBUG oslo_vmware.api [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508313, 'name': PowerOnVM_Task, 'duration_secs': 0.577489} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.106220] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.106403] env[69475]: INFO nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Took 9.82 seconds to spawn the instance on the hypervisor. [ 900.106581] env[69475]: DEBUG nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.107513] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a78734b-11b6-4f42-be0e-79889f9463e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.161876] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d655d17a-f67c-43ef-8309-3acfc62a6461 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.177458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeba76fc-e2d5-4de7-b936-2db4edced873 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.219133] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f098966-22e4-4412-914f-898ed4c22c7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.227575] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db89df33-1279-406d-b1df-9cc760f9aad4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.241582] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 9cfd8425-c1aa-4dbc-afa4-3a5aa10428de] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.243974] env[69475]: DEBUG nova.compute.provider_tree [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.527417] env[69475]: DEBUG oslo_vmware.api [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508315, 'name': PowerOnVM_Task, 'duration_secs': 0.836351} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.527417] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.527417] env[69475]: INFO nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Took 7.66 seconds to spawn the instance on the hypervisor. [ 900.527417] env[69475]: DEBUG nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 900.528073] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65ba6fb1-71bc-4d01-afef-e484bdb27f25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.551382] env[69475]: DEBUG oslo_concurrency.lockutils [req-0a26554d-e54c-490f-8310-39beac742096 req-8200ee75-e58b-4f5b-942a-b9beb36dc248 service nova] Releasing lock "refresh_cache-0a65565c-c679-47e5-8606-832fe3876af6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.585210] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dcf711-e7e0-a3cb-d97e-a53e2102b26f, 'name': SearchDatastore_Task, 'duration_secs': 0.020415} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.585567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.585829] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.588188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.588188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.588188] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.588188] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91d1af1-cb47-42aa-b3a5-b1aa6f4fd746 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.595906] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.596551] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.597702] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faff3099-5c69-4452-99be-d26cd942c41a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.605076] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 900.605076] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52083c31-3e76-3339-b218-db35da64b22a" [ 900.605076] env[69475]: _type = "Task" [ 900.605076] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.612834] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52083c31-3e76-3339-b218-db35da64b22a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.632792] env[69475]: INFO nova.compute.manager [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Took 52.36 seconds to build instance. [ 900.673747] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.674245] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.674516] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.674732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 900.674908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 
tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.677964] env[69475]: INFO nova.compute.manager [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Terminating instance [ 900.691717] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 900.744720] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 900.745025] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 900.746533] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 900.746533] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 900.746533] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 900.746533] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 
tempest-ListServerFiltersTestJSON-2013984309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 900.746533] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 900.746953] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 900.747128] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 900.747225] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 900.747406] env[69475]: DEBUG nova.virt.hardware [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 900.748091] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 48bc79bc-df56-4523-808f-a71b391062b9] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 900.753545] env[69475]: DEBUG nova.scheduler.client.report [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 900.757540] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa325baf-bcc3-4e55-950f-8c7db3186bda {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.769710] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb8c21d-1bff-4bb6-a905-f39cf522ee87 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.887959] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Successfully created port: 277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 901.055588] env[69475]: INFO nova.compute.manager [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Took 50.88 seconds to build instance. [ 901.118487] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52083c31-3e76-3339-b218-db35da64b22a, 'name': SearchDatastore_Task, 'duration_secs': 0.013071} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.119552] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41bc4c54-38b3-4616-8c21-e7e31580b91e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.129112] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 901.129112] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f55d1-9a6d-e052-b772-0127e98ea09d" [ 901.129112] env[69475]: _type = "Task" [ 901.129112] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.141211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c351186f-34b1-4f45-896d-e251aea605ed tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.878s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.144268] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f55d1-9a6d-e052-b772-0127e98ea09d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.192214] env[69475]: DEBUG nova.compute.manager [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 901.192214] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 901.192214] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c4bfa4-4af6-437b-899c-4cc6705ce8bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.204714] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.204839] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5c7c85fc-073a-4e74-ad89-c7c7f9b3d23d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.212782] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 901.212782] env[69475]: value = "task-3508318" [ 901.212782] env[69475]: _type = "Task" [ 901.212782] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.222387] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.267251] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.267251] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 901.272469] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 4465f156-09cc-4eba-90e4-be76f3010363] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 901.276508] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.967s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.276508] env[69475]: DEBUG nova.objects.instance [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lazy-loading 'resources' on Instance uuid b41845c6-46bd-4b3b-ab26-d7d2dad08f84 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.409622] env[69475]: INFO nova.compute.manager [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Rescuing [ 901.409919] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.410082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.410278] env[69475]: DEBUG nova.network.neutron [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 901.562707] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a806d438-ed27-4f9e-a57d-39c60af2ca4a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.396s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.646807] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f55d1-9a6d-e052-b772-0127e98ea09d, 'name': SearchDatastore_Task, 'duration_secs': 0.017091} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.647063] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.647366] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 0a65565c-c679-47e5-8606-832fe3876af6/0a65565c-c679-47e5-8606-832fe3876af6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.647648] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1854ac9f-c73d-4941-9a7c-1b0278fbfe5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.655395] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 901.655395] env[69475]: value = "task-3508319" [ 901.655395] env[69475]: _type = "Task" [ 901.655395] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.657924] env[69475]: DEBUG nova.compute.manager [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Received event network-changed-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.658206] env[69475]: DEBUG nova.compute.manager [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Refreshing instance network info cache due to event network-changed-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 901.658371] env[69475]: DEBUG oslo_concurrency.lockutils [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.658511] env[69475]: DEBUG oslo_concurrency.lockutils [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 901.658719] env[69475]: DEBUG nova.network.neutron [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Refreshing network info cache for port 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 901.674500] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508319, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.723819] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508318, 'name': PowerOffVM_Task, 'duration_secs': 0.430436} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.724333] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 901.724508] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 901.724761] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-98f67e41-2ddd-4e04-a15b-797fc29251b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.776552] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e1ecc905-22da-434a-8ddf-a66f88ab47fb] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 901.781240] env[69475]: DEBUG nova.compute.utils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 901.784471] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d 
tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 901.784640] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 901.796389] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 901.796389] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 901.796389] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore2] 86464a01-e034-43b6-a6d5-45f9e3b6715b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 901.796389] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d553f9b-4369-4fd9-adc5-084951060b4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.802924] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 901.802924] env[69475]: value = "task-3508321" [ 901.802924] env[69475]: _type = "Task" [ 901.802924] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.815403] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.844883] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.845138] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.882834] env[69475]: DEBUG nova.policy [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc7e62ede743400197923eebd7318481', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72b480b7835d47a18d77bfe4a983f017', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 902.215534] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508319, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.292242] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 902.297922] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: dc2614b1-95b8-4887-8ca6-efe92921c926] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.318978] env[69475]: DEBUG oslo_vmware.api [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.304044} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.319716] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.319716] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.320117] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.320117] env[69475]: INFO nova.compute.manager [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Took 1.13 seconds to destroy the instance on the hypervisor. [ 902.321157] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 902.321278] env[69475]: DEBUG nova.compute.manager [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 902.321485] env[69475]: DEBUG nova.network.neutron [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 902.350505] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.417495] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4580edd-108f-4b00-a27d-a52fcaaf6da9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.425668] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd89428-ed8d-4e5c-9b9c-c636df573501 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.460106] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8102a7f1-8741-4b17-897a-6f2a105abb4f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.468182] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d17579-b0aa-4e71-96ac-0b2536321cc4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.484734] env[69475]: DEBUG nova.compute.provider_tree [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.633185] env[69475]: DEBUG nova.network.neutron [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updating instance_info_cache with network_info: [{"id": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "address": "fa:16:3e:0b:b8:24", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4246ca40-af", "ovs_interfaceid": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.669969] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.624571} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.670142] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 0a65565c-c679-47e5-8606-832fe3876af6/0a65565c-c679-47e5-8606-832fe3876af6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.670367] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.671264] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07b395b0-d9e6-485b-9128-579a0cccfb25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.679852] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 902.679852] env[69475]: value = "task-3508322" [ 902.679852] env[69475]: _type = "Task" [ 902.679852] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.689628] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508322, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.811514] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: ec7a6b3c-2a2f-4edd-8b79-ba55551d6159] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 902.885801] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.989180] env[69475]: DEBUG nova.scheduler.client.report [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.111151] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Successfully created port: 85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 903.135765] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 903.190780] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508322, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103165} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.191315] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.192567] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d7fddc-d8cc-4431-b223-1adeaeab4794 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.221995] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 0a65565c-c679-47e5-8606-832fe3876af6/0a65565c-c679-47e5-8606-832fe3876af6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.226015] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b3bb8fd4-8b5d-40f7-94ae-078d329a7972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.246459] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 903.246459] env[69475]: value = "task-3508323" [ 903.246459] env[69475]: _type = "Task" [ 903.246459] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.256699] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508323, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.313656] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 903.317294] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 91d5b0db-63a5-4290-af9b-264a5ce4cd95] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 903.341566] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 903.342742] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 903.343008] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 903.343627] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 903.343627] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 903.343627] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 903.343829] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 903.343889] env[69475]: DEBUG nova.virt.hardware 
[None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 903.344361] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 903.344361] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 903.344472] env[69475]: DEBUG nova.virt.hardware [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 903.345368] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4ad954-6f54-47bc-8236-fda65d673e6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.353995] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd7bd4c4-d2d0-4616-b757-28791737edd8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.411454] env[69475]: DEBUG nova.network.neutron [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updated VIF entry in instance network info cache for port 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 903.411823] env[69475]: DEBUG nova.network.neutron [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.497186] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.224s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 903.500907] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.021s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 903.505898] env[69475]: INFO nova.compute.claims [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 903.546269] env[69475]: DEBUG nova.network.neutron [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.558776] env[69475]: INFO nova.scheduler.client.report [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Deleted allocations for instance b41845c6-46bd-4b3b-ab26-d7d2dad08f84 [ 903.758641] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 
tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508323, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.771211] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Successfully updated port: 277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.820340] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 903.821627] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Cleaning up deleted instances with incomplete migration {{(pid=69475) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 903.914966] env[69475]: DEBUG oslo_concurrency.lockutils [req-3598ad9d-b851-4d37-b91d-7f410965cab3 req-deaa3e43-65a1-40a0-9d7e-94446f1e8b67 service nova] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.050129] env[69475]: INFO nova.compute.manager [-] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Took 1.73 seconds to deallocate network for instance. [ 904.057864] env[69475]: DEBUG nova.compute.manager [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 904.073671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2065569e-45fb-4b16-a6a4-f9a1fb054181 tempest-FloatingIPsAssociationTestJSON-819956809 tempest-FloatingIPsAssociationTestJSON-819956809-project-member] Lock "b41845c6-46bd-4b3b-ab26-d7d2dad08f84" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.681s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.150628] env[69475]: DEBUG nova.compute.manager [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Received event network-vif-deleted-858c37b6-4824-46d3-9dff-c0e0d91c47b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.151074] env[69475]: DEBUG nova.compute.manager [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Received event network-vif-plugged-277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 904.151402] env[69475]: DEBUG oslo_concurrency.lockutils [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] Acquiring lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.151763] env[69475]: DEBUG oslo_concurrency.lockutils [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.152082] env[69475]: DEBUG oslo_concurrency.lockutils [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.152376] env[69475]: DEBUG nova.compute.manager [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] No waiting events found dispatching network-vif-plugged-277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 904.152694] env[69475]: WARNING nova.compute.manager [req-d457ae89-1072-49bc-b77a-b693eec7f0c8 req-6c610124-ce92-46fc-ad19-adf56b3162e4 service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Received unexpected event network-vif-plugged-277b3f9d-a1c5-4f1b-be8a-4818987fd78e for instance with vm_state building and task_state spawning. [ 904.265127] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508323, 'name': ReconfigVM_Task, 'duration_secs': 0.926601} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.265127] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 0a65565c-c679-47e5-8606-832fe3876af6/0a65565c-c679-47e5-8606-832fe3876af6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.265127] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a44f63bd-efc6-4fac-bbb3-5254370d70d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.273804] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.273949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 904.274186] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 904.278083] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 904.278083] env[69475]: value = "task-3508324" [ 904.278083] env[69475]: _type = "Task" [ 904.278083] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.290829] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508324, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.324433] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 904.398728] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "78b5496c-f8e2-4681-a36b-50897b0f7325" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.399102] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.579430] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.588671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 904.684468] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 904.685043] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baad6293-1677-43bb-adad-abdbff82a3c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.694180] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 904.694180] env[69475]: value = "task-3508325" [ 904.694180] env[69475]: _type = "Task" [ 904.694180] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.714989] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508325, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.793581] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508324, 'name': Rename_Task, 'duration_secs': 0.270465} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.793836] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.796866] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8732fe68-00dc-499b-bedf-3c047bbbbe8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.803863] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 904.803863] env[69475]: value = "task-3508326" [ 904.803863] env[69475]: _type = "Task" [ 904.803863] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.813133] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508326, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.867298] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 904.902115] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 905.072524] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c16a7f-6a05-45d5-824c-2639e32c52bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.080126] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27d97bf-7a3d-4fe1-be77-51882afeac69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.118037] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e4a444-b853-4626-bdc9-2d4b6fbc34f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.126161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f7fe2ec-908a-4085-9709-a3f5c29a1fdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.144495] env[69475]: DEBUG nova.compute.provider_tree [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.204052] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508325, 'name': PowerOffVM_Task, 'duration_secs': 0.231882} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.204052] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 905.204804] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d822bb4-cdbd-40a2-8b5d-366a4d6ea779 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.224602] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32d4527-8676-4149-bd36-f8aadb241678 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.253264] env[69475]: DEBUG nova.network.neutron [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updating instance_info_cache with network_info: [{"id": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "address": "fa:16:3e:94:d7:8c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b3f9d-a1", "ovs_interfaceid": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.292819] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 905.294656] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-af0dc345-3042-4069-a0e8-786763fc1bc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.308025] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 905.308025] env[69475]: value = "task-3508327" [ 905.308025] env[69475]: _type = "Task" [ 905.308025] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.330052] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508326, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.330918] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 905.330918] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 905.331046] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 905.331181] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 905.331360] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 905.331614] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d71be70d-6509-401c-8cbf-5e8417ff666c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.341298] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 905.341540] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 905.342541] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-105e5343-942c-40e4-a67e-6d7eabcedf07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.349740] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 905.349740] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525470f2-9aed-d1e6-b625-764f41571633" [ 905.349740] env[69475]: _type = "Task" [ 905.349740] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.357678] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525470f2-9aed-d1e6-b625-764f41571633, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.449300] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.599221] env[69475]: DEBUG nova.compute.manager [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Received event network-vif-plugged-85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 905.601088] env[69475]: DEBUG oslo_concurrency.lockutils [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] Acquiring lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.602247] env[69475]: DEBUG oslo_concurrency.lockutils [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.003s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 905.602443] env[69475]: DEBUG oslo_concurrency.lockutils [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 905.602652] env[69475]: DEBUG nova.compute.manager [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] No waiting events found 
dispatching network-vif-plugged-85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 905.602908] env[69475]: WARNING nova.compute.manager [req-5e983379-6be0-43b1-9379-0609521a3d86 req-d0c7c671-92c4-44ee-9d62-d0d159ee75a6 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Received unexpected event network-vif-plugged-85c87dc2-a1dc-4c52-9f42-7af24dfa8791 for instance with vm_state building and task_state spawning. [ 905.648322] env[69475]: DEBUG nova.scheduler.client.report [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 905.739591] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Successfully updated port: 85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 905.754648] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 905.754946] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Instance network_info: |[{"id": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "address": "fa:16:3e:94:d7:8c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b3f9d-a1", "ovs_interfaceid": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 905.755421] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:d7:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '277b3f9d-a1c5-4f1b-be8a-4818987fd78e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.763216] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Creating folder: Project (72b480b7835d47a18d77bfe4a983f017). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.764166] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ce76c056-6129-4d6b-9c05-74641c930ba9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.777384] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Created folder: Project (72b480b7835d47a18d77bfe4a983f017) in parent group-v700823. [ 905.777384] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Creating folder: Instances. Parent ref: group-v701026. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 905.777384] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ff57040-eca9-45c5-9248-43c378663d26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.790420] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Created folder: Instances in parent group-v701026. [ 905.790716] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 905.790915] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.791152] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee7f1095-4d42-429f-8856-286a7ab94ecb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.822362] env[69475]: DEBUG oslo_vmware.api [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508326, 'name': PowerOnVM_Task, 'duration_secs': 0.519167} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.823690] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.823913] env[69475]: INFO nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Took 10.39 seconds to spawn the instance on the hypervisor. [ 905.824104] env[69475]: DEBUG nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.824351] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.824351] env[69475]: value = "task-3508330" [ 905.824351] env[69475]: _type = "Task" [ 905.824351] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.825096] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9231a0ad-0528-4ec5-abef-0a8268ac1b50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.839112] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508330, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.860287] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525470f2-9aed-d1e6-b625-764f41571633, 'name': SearchDatastore_Task, 'duration_secs': 0.030069} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.861471] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccac0d1c-6f50-4d65-b819-e3c9bce4e724 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.866689] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 905.866689] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5215c414-608f-2068-ce58-910621199358" [ 905.866689] env[69475]: _type = "Task" [ 905.866689] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.879290] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5215c414-608f-2068-ce58-910621199358, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.153864] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.153864] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.156382] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.585s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.156516] env[69475]: DEBUG nova.objects.instance [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lazy-loading 'resources' on Instance uuid 980bb0eb-121c-4703-a453-fb0b4351e9e3 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.242114] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.242114] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.242315] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 906.289041] env[69475]: DEBUG nova.compute.manager [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Received event network-changed-277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 906.289210] env[69475]: DEBUG nova.compute.manager [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Refreshing instance network info cache due to event network-changed-277b3f9d-a1c5-4f1b-be8a-4818987fd78e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 906.289650] env[69475]: DEBUG oslo_concurrency.lockutils [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] Acquiring lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.289932] env[69475]: DEBUG oslo_concurrency.lockutils [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] Acquired lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.290220] env[69475]: DEBUG nova.network.neutron [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Refreshing network info cache for port 277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 906.340111] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508330, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.348571] env[69475]: INFO nova.compute.manager [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Took 45.80 seconds to build instance. [ 906.381504] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5215c414-608f-2068-ce58-910621199358, 'name': SearchDatastore_Task, 'duration_secs': 0.013109} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.381836] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 906.384418] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
{{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 906.384886] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0e7ac418-54db-456f-80e5-34e642a3acc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.395848] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 906.395848] env[69475]: value = "task-3508331" [ 906.395848] env[69475]: _type = "Task" [ 906.395848] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.408736] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.663977] env[69475]: DEBUG nova.compute.utils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 906.666610] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 906.667369] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 906.839640] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "02ba199b-a7dc-421c-a14a-b562da275377" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.839907] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.841821] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 906.845058] env[69475]: DEBUG nova.policy [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fc7e62ede743400197923eebd7318481', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '72b480b7835d47a18d77bfe4a983f017', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 906.852285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-737d89a4-f2aa-46f3-bf45-538b24108c71 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.319s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.859495] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508330, 'name': CreateVM_Task, 'duration_secs': 0.593694} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.859800] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.860797] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.861067] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 906.861502] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 906.862500] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c9a5d92-997b-479d-b4f6-eda5ada44b26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.869384] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 906.869384] 
env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f42f-1f89-a2c3-3889-2aa68f5593d2" [ 906.869384] env[69475]: _type = "Task" [ 906.869384] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.880701] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f42f-1f89-a2c3-3889-2aa68f5593d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.907815] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508331, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.175222] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.181628] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abae63b-700c-443b-9447-a52e84e73526 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.191535] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010631b4-dda9-48ac-a60b-1abe17a4092a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.230611] env[69475]: DEBUG nova.network.neutron [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Updating instance_info_cache with network_info: [{"id": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "address": "fa:16:3e:ad:27:d1", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85c87dc2-a1", "ovs_interfaceid": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 907.232346] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8651e6b-a32e-448a-89ee-92ea1cd0fc4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.243426] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b008feb-a287-4b22-96ae-53c7ece798fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.262275] env[69475]: DEBUG nova.compute.provider_tree [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.350324] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.388706] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5266f42f-1f89-a2c3-3889-2aa68f5593d2, 'name': SearchDatastore_Task, 'duration_secs': 0.05445} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.388958] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.389213] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 907.389519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.389613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
907.390201] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 907.390570] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb2647a8-a7a1-4a0f-b535-0b7bb4b5b338 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.414000] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 907.414000] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 907.414623] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654016} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.414832] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-753712fe-69ca-4592-8d3e-0d7456f9b69c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.417276] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. [ 907.419548] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69cf9f18-4ce8-4ae5-9228-58ed05e8209f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.431889] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 907.431889] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d175ee-c198-4685-2bb2-d00b440da470" [ 907.431889] env[69475]: _type = "Task" [ 907.431889] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.454924] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 907.463195] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd6670a0-650f-4bc5-accc-4d30ef8b0ed9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.485548] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d175ee-c198-4685-2bb2-d00b440da470, 'name': SearchDatastore_Task, 'duration_secs': 0.032155} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.487884] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 907.487884] env[69475]: value = "task-3508332" [ 907.487884] env[69475]: _type = "Task" [ 907.487884] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.488142] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-573da30f-a2cb-4d22-946a-25e5f7b155d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.500947] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508332, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.502174] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 907.502174] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527d34f9-034e-5fb1-ceb8-cc749074c343" [ 907.502174] env[69475]: _type = "Task" [ 907.502174] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.511996] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527d34f9-034e-5fb1-ceb8-cc749074c343, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.736896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 907.737298] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Instance network_info: |[{"id": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "address": "fa:16:3e:ad:27:d1", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85c87dc2-a1", "ovs_interfaceid": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 907.737796] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:27:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '85c87dc2-a1dc-4c52-9f42-7af24dfa8791', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.746154] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.746852] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 907.747128] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94c8e95c-88cf-4a70-99e4-e7db38a740aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.766363] env[69475]: DEBUG nova.network.neutron [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updated VIF entry in instance network info cache for port 277b3f9d-a1c5-4f1b-be8a-4818987fd78e. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 907.766975] env[69475]: DEBUG nova.network.neutron [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updating instance_info_cache with network_info: [{"id": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "address": "fa:16:3e:94:d7:8c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b3f9d-a1", "ovs_interfaceid": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.771787] env[69475]: DEBUG nova.scheduler.client.report [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.784606] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.784606] env[69475]: value = "task-3508333" [ 907.784606] env[69475]: _type = "Task" [ 907.784606] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.792963] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508333, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.864243] env[69475]: INFO nova.compute.manager [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Rebuilding instance [ 907.900025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.940271] env[69475]: DEBUG nova.compute.manager [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.941415] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce12bb8b-0a4e-4bd0-8c7f-2eb1df30f73e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.002611] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508332, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.015346] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527d34f9-034e-5fb1-ceb8-cc749074c343, 'name': SearchDatastore_Task, 'duration_secs': 0.01627} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.015346] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.015346] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f40aa0bb-af1d-4f8f-a906-f1c83307b465/f40aa0bb-af1d-4f8f-a906-f1c83307b465.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 908.015346] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-782da257-013a-4ccc-b4a6-198f75158482 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.022193] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 908.022193] env[69475]: value = "task-3508334" [ 908.022193] env[69475]: _type = "Task" [ 908.022193] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.033956] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508334, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.045688] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Successfully created port: 1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 908.193887] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.230055] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 908.231848] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 908.232280] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 908.232561] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 908.232732] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 908.232910] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 908.233154] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 908.233315] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 
908.233499] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 908.233829] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 908.234349] env[69475]: DEBUG nova.virt.hardware [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 908.237702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97956e9c-f6e1-48f0-9063-f3b22f1eeeb7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.253701] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010820ca-7358-4087-9300-252c888eb593 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.277382] env[69475]: DEBUG oslo_concurrency.lockutils [req-00641edc-fb35-472e-add7-1eb88ea16d57 req-e1dd7c66-c02c-4935-8457-7cd562ec99fe service nova] Releasing lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.280681] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.122s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.281608] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.847s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.283400] env[69475]: INFO nova.compute.claims [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.303648] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508333, 'name': CreateVM_Task, 'duration_secs': 0.407797} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.303648] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 908.304048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.304927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.304927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 908.304927] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cb86000-ff68-48c2-8a76-cab2cf8ce226 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.312092] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 908.312092] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a7fd9d-584a-2786-3d88-866e10c8fa78" [ 908.312092] env[69475]: _type = "Task" [ 908.312092] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.321715] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a7fd9d-584a-2786-3d88-866e10c8fa78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.335668] env[69475]: INFO nova.scheduler.client.report [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Deleted allocations for instance 980bb0eb-121c-4703-a453-fb0b4351e9e3 [ 908.494272] env[69475]: DEBUG nova.compute.manager [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Received event network-changed-85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 908.494502] env[69475]: DEBUG nova.compute.manager [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Refreshing instance network info cache due to event network-changed-85c87dc2-a1dc-4c52-9f42-7af24dfa8791. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 908.495288] env[69475]: DEBUG oslo_concurrency.lockutils [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] Acquiring lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.495288] env[69475]: DEBUG oslo_concurrency.lockutils [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] Acquired lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.495288] env[69475]: DEBUG nova.network.neutron [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Refreshing network info cache for port 85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 908.510315] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508332, 'name': ReconfigVM_Task, 'duration_secs': 0.657989} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.510315] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfigured VM instance instance-00000045 to attach disk [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.511042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf970f1-5d1e-4f28-9005-b66b9d3510cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.546283] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8f482ce-1f12-42d5-be39-ad5fc250425a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.563975] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508334, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.565534] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 908.565534] env[69475]: value = "task-3508335" [ 908.565534] env[69475]: _type = "Task" [ 908.565534] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.574235] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508335, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.822876] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a7fd9d-584a-2786-3d88-866e10c8fa78, 'name': SearchDatastore_Task, 'duration_secs': 0.060381} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.823710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 908.823710] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 908.823915] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.823915] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.824089] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 908.824362] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8442f9fe-4675-453a-bfdb-23a2f9071694 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.833449] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 908.833636] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 908.834368] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fa8876a-c42b-4162-99d4-68179535663f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.839769] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 908.839769] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522c9fcf-ef65-214c-f5de-8c140064f78b" [ 908.839769] env[69475]: _type = "Task" [ 908.839769] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.849174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-403b65bd-71ba-4fcc-8e97-73c77d7c9497 tempest-ImagesOneServerNegativeTestJSON-1810634108 tempest-ImagesOneServerNegativeTestJSON-1810634108-project-member] Lock "980bb0eb-121c-4703-a453-fb0b4351e9e3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.423s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.856603] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522c9fcf-ef65-214c-f5de-8c140064f78b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.962404] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.962404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6aaa0e35-b963-432e-baad-785497178cdf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.970032] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 908.970032] env[69475]: value = "task-3508336" [ 908.970032] env[69475]: _type = "Task" [ 908.970032] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.978642] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.049893] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508334, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564198} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.050581] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f40aa0bb-af1d-4f8f-a906-f1c83307b465/f40aa0bb-af1d-4f8f-a906-f1c83307b465.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.050926] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.051262] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f4e92fe4-cf99-47f3-a91e-b5957e205c19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.058625] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 909.058625] env[69475]: value = "task-3508337" [ 909.058625] env[69475]: _type = "Task" [ 909.058625] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.067521] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508337, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.079917] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508335, 'name': ReconfigVM_Task, 'duration_secs': 0.229571} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.080247] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 909.087960] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5509fd87-e9af-42e6-8521-52b49576d560 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.094529] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 909.094529] env[69475]: value = "task-3508338" [ 909.094529] env[69475]: _type = "Task" [ 909.094529] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.102415] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508338, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.350570] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522c9fcf-ef65-214c-f5de-8c140064f78b, 'name': SearchDatastore_Task, 'duration_secs': 0.015017} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.351700] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f2fe5f5-5c15-4eda-a77b-cc241826a37c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.360516] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 909.360516] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca94fb-2a52-9f53-9d5c-3910550791e0" [ 909.360516] env[69475]: _type = "Task" [ 909.360516] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.366728] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca94fb-2a52-9f53-9d5c-3910550791e0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.481346] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508336, 'name': PowerOffVM_Task, 'duration_secs': 0.217907} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.481566] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.482286] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.482564] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6e7d559-37a5-4e0f-9112-ed6279290986 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.488846] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 909.488846] env[69475]: value = "task-3508339" [ 909.488846] env[69475]: _type = "Task" [ 909.488846] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.503472] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 909.503739] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 909.503961] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700950', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'name': 'volume-44671911-bc3c-459e-8572-d2ff086a0071', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8cc0636c-84af-4f68-bec8-1493b421a605', 'attached_at': '', 'detached_at': '', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'serial': '44671911-bc3c-459e-8572-d2ff086a0071'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 909.505267] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ae4506-6ee9-467e-866a-7cea3b258c73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.528662] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acf95069-f68e-40e4-84b6-aada05450ccc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.538201] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580daa18-b4ee-410e-b784-871aac99abf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.565984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4292cd-b2d0-4d8f-a579-c3b068d8f585 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.586189] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] The volume has not been displaced from its original location: [datastore1] volume-44671911-bc3c-459e-8572-d2ff086a0071/volume-44671911-bc3c-459e-8572-d2ff086a0071.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 909.592629] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Reconfiguring VM instance instance-0000003a to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 909.599058] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e240a768-07d3-41fa-b417-3f1e9fffbcc6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.612918] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065621} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.619198] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.620471] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02031b8-2efb-4c45-a18b-9168e8980de0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.625257] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 909.625257] env[69475]: value = "task-3508340" [ 909.625257] env[69475]: _type = "Task" [ 909.625257] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.644875] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] f40aa0bb-af1d-4f8f-a906-f1c83307b465/f40aa0bb-af1d-4f8f-a906-f1c83307b465.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.654790] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d997e5f-025d-4535-a4aa-c24171218ca3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.669680] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508338, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.675705] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508340, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.677159] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 909.677159] env[69475]: value = "task-3508341" [ 909.677159] env[69475]: _type = "Task" [ 909.677159] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.685777] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508341, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.784042] env[69475]: DEBUG nova.network.neutron [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Updated VIF entry in instance network info cache for port 85c87dc2-a1dc-4c52-9f42-7af24dfa8791. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 909.784416] env[69475]: DEBUG nova.network.neutron [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Updating instance_info_cache with network_info: [{"id": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "address": "fa:16:3e:ad:27:d1", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap85c87dc2-a1", "ovs_interfaceid": "85c87dc2-a1dc-4c52-9f42-7af24dfa8791", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.870848] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ca94fb-2a52-9f53-9d5c-3910550791e0, 'name': SearchDatastore_Task, 'duration_secs': 
0.059046} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.871239] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 909.871509] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 9c27dcc3-67df-46ea-947d-b2ecdaeeb003/9c27dcc3-67df-46ea-947d-b2ecdaeeb003.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 909.871780] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0a406155-948c-4f08-93dd-b38fc3f8f271 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.878661] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 909.878661] env[69475]: value = "task-3508342" [ 909.878661] env[69475]: _type = "Task" [ 909.878661] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.888057] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508342, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.894451] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d046e05-6712-4996-a186-59cd1d5b6ad3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.902368] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b950e8-83be-4f62-aa14-e8b9a28bb0e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.937275] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee9bbd33-7f6a-4702-aa9d-38e8736190e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.946373] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2b231e-ae0d-4ddb-99e7-6f366610c8cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.961195] env[69475]: DEBUG nova.compute.provider_tree [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.109358] env[69475]: DEBUG oslo_vmware.api [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508338, 'name': PowerOnVM_Task, 'duration_secs': 0.92619} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.109358] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 910.111864] env[69475]: DEBUG nova.compute.manager [None req-992b746d-c38b-420a-a87e-97e4ab7c8130 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 910.112761] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c70fefa6-d511-4312-92d4-e3cad3004552 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.145602] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508340, 'name': ReconfigVM_Task, 'duration_secs': 0.163213} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.146683] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Reconfigured VM instance instance-0000003a to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 910.157328] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b7308f6-671c-4275-a112-ca0befe97525 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.176342] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 910.176342] env[69475]: value = "task-3508343" [ 910.176342] env[69475]: _type = "Task" [ 910.176342] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.189145] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508343, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.195794] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508341, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.287985] env[69475]: DEBUG oslo_concurrency.lockutils [req-43191dfd-0e42-47d3-8ab5-cdc9796f3280 req-fb0dc6f8-32c7-4bea-b6c1-2fb882e7a3a5 service nova] Releasing lock "refresh_cache-9c27dcc3-67df-46ea-947d-b2ecdaeeb003" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.389833] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508342, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.468437] env[69475]: DEBUG nova.scheduler.client.report [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.694068] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508341, 'name': ReconfigVM_Task, 'duration_secs': 0.683848} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.702674] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Reconfigured VM instance instance-00000048 to attach disk [datastore1] f40aa0bb-af1d-4f8f-a906-f1c83307b465/f40aa0bb-af1d-4f8f-a906-f1c83307b465.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.702674] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508343, 'name': ReconfigVM_Task, 'duration_secs': 0.188811} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.702674] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-137da1d1-dc34-47df-88b5-e0414ba2f292 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.702674] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-700950', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'name': 'volume-44671911-bc3c-459e-8572-d2ff086a0071', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '8cc0636c-84af-4f68-bec8-1493b421a605', 'attached_at': '', 'detached_at': '', 'volume_id': '44671911-bc3c-459e-8572-d2ff086a0071', 'serial': '44671911-bc3c-459e-8572-d2ff086a0071'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 910.702674] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 910.702674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86963019-78bc-4ff4-b44b-6b4bdc11563e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.709236] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 910.710419] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0709a0da-5116-45c9-bdaf-00b173834d8d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.711810] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 910.711810] env[69475]: value = "task-3508344" [ 910.711810] env[69475]: _type = "Task" [ 910.711810] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.746544] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508344, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.781444] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 910.781669] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 910.781854] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Deleting the datastore file [datastore1] 8cc0636c-84af-4f68-bec8-1493b421a605 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 910.782306] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de5f7a9a-3cac-4ad2-8c01-ad5196973b46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.796432] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for the task: (returnval){ [ 910.796432] env[69475]: value = "task-3508346" [ 910.796432] env[69475]: _type = "Task" [ 910.796432] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.808861] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.889106] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.716232} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.889669] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 9c27dcc3-67df-46ea-947d-b2ecdaeeb003/9c27dcc3-67df-46ea-947d-b2ecdaeeb003.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 910.890088] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 910.890490] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d393aa7-8f11-4d77-93a9-f578c859fb5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.896391] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 910.896391] env[69475]: value = "task-3508347" [ 910.896391] env[69475]: _type = "Task" [ 910.896391] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.906310] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508347, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.981135] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.982020] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 910.990481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.494s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.990481] env[69475]: DEBUG nova.objects.instance [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lazy-loading 'resources' on Instance uuid d1e5e08d-b41a-4655-997d-91fbd3581f00 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 911.231252] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508344, 'name': Rename_Task, 'duration_secs': 0.183035} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.234688] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.234688] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8a6b20c-7805-4da5-a716-e57a7ed9b50d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.240638] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 911.240638] env[69475]: value = "task-3508348" [ 911.240638] env[69475]: _type = "Task" [ 911.240638] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.250994] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.310214] env[69475]: DEBUG oslo_vmware.api [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Task: {'id': task-3508346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096996} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.310284] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 911.310531] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 911.311424] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 911.419436] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Volume detach. Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 911.426862] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-68519509-7812-4714-b284-6f401294889b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.429414] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508347, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071709} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.429822] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 911.431180] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507fdb17-3baa-4e7e-94a2-79020e811837 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.443156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20765055-19ca-422a-9e61-04a17c045b3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.482772] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 9c27dcc3-67df-46ea-947d-b2ecdaeeb003/9c27dcc3-67df-46ea-947d-b2ecdaeeb003.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 911.483911] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-511955a4-bd1e-41f0-a7b6-d8089700150b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.520099] env[69475]: DEBUG nova.compute.utils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.529080] env[69475]: ERROR nova.compute.manager [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Failed to detach volume 44671911-bc3c-459e-8572-d2ff086a0071 from /dev/sda: nova.exception.InstanceNotFound: Instance 8cc0636c-84af-4f68-bec8-1493b421a605 could not be found. 
[ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Traceback (most recent call last): [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self.driver.rebuild(**kwargs) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise NotImplementedError() [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] NotImplementedError [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] During handling of the above exception, another exception occurred: [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Traceback (most recent call last): [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self.driver.detach_volume(context, old_connection_info, [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] return self._volumeops.detach_volume(connection_info, instance) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._detach_volume_vmdk(connection_info, instance) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] stable_ref.fetch_moref(session) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise exception.InstanceNotFound(instance_id=self._uuid) [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] nova.exception.InstanceNotFound: 
Instance 8cc0636c-84af-4f68-bec8-1493b421a605 could not be found. [ 911.529080] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.531070] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.531070] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.539914] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 911.539914] env[69475]: value = "task-3508349" [ 911.539914] env[69475]: _type = "Task" [ 911.539914] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.556198] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508349, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.593499] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Successfully updated port: 1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.642601] env[69475]: DEBUG nova.policy [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.752413] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508348, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.781207] env[69475]: DEBUG nova.compute.manager [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Received event network-vif-plugged-1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 911.781207] env[69475]: DEBUG oslo_concurrency.lockutils [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] Acquiring lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.781207] env[69475]: DEBUG oslo_concurrency.lockutils [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.781207] env[69475]: DEBUG oslo_concurrency.lockutils [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.781207] env[69475]: DEBUG nova.compute.manager [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] No waiting events found dispatching network-vif-plugged-1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 911.781207] env[69475]: WARNING nova.compute.manager [req-79c216be-061c-44d7-b03d-b5b578aa2cdf req-8aba30c3-b770-499f-8b01-d71be7a79145 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Received unexpected event network-vif-plugged-1176c458-2328-4179-b0d0-cbcea8175e66 for instance with vm_state building and task_state spawning. [ 911.796783] env[69475]: DEBUG nova.compute.utils [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Build of instance 8cc0636c-84af-4f68-bec8-1493b421a605 aborted: Failed to rebuild volume backed instance. {{(pid=69475) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 911.799267] env[69475]: ERROR nova.compute.manager [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 8cc0636c-84af-4f68-bec8-1493b421a605 aborted: Failed to rebuild volume backed instance. 
[ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Traceback (most recent call last): [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self.driver.rebuild(**kwargs) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise NotImplementedError() [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] NotImplementedError [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] During handling of the above exception, another exception occurred: [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Traceback (most recent call last): [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._detach_root_volume(context, instance, root_bdm) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] with excutils.save_and_reraise_exception(): [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self.force_reraise() [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise self.value [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self.driver.detach_volume(context, old_connection_info, [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] return self._volumeops.detach_volume(connection_info, instance) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._detach_volume_vmdk(connection_info, instance) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] stable_ref.fetch_moref(session) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise exception.InstanceNotFound(instance_id=self._uuid) [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] nova.exception.InstanceNotFound: Instance 8cc0636c-84af-4f68-bec8-1493b421a605 could not be found. [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] During handling of the above exception, another exception occurred: [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Traceback (most recent call last): [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] yield [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 911.799267] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._do_rebuild_instance_with_claim( [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._do_rebuild_instance( [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._rebuild_default_impl(**kwargs) [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] self._rebuild_volume_backed_instance( [ 
911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] raise exception.BuildAbortException( [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] nova.exception.BuildAbortException: Build of instance 8cc0636c-84af-4f68-bec8-1493b421a605 aborted: Failed to rebuild volume backed instance. [ 911.800823] env[69475]: ERROR nova.compute.manager [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] [ 911.922767] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "0a65565c-c679-47e5-8606-832fe3876af6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.923041] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.923251] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "0a65565c-c679-47e5-8606-832fe3876af6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 911.924765] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 911.924765] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 911.927837] env[69475]: INFO nova.compute.manager [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Terminating instance [ 912.031651] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.055493] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508349, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.057658] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf364032-80e0-4fa7-bb9e-b41ad9a098f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.069836] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f01ed42-b0c5-4fd9-a9ba-d4d71b21780f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.104869] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.104990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.105181] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.107687] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2268217b-200d-4291-95b9-67da2aababa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.117652] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de93cc99-c644-4515-a106-251268935ce5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.135111] env[69475]: DEBUG nova.compute.provider_tree [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.253128] env[69475]: DEBUG oslo_vmware.api [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508348, 'name': PowerOnVM_Task, 'duration_secs': 0.873526} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.253429] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.253652] env[69475]: INFO nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Took 11.56 seconds to spawn the instance on the hypervisor. [ 912.254339] env[69475]: DEBUG nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 912.255343] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bd07f4-b682-45eb-8152-1370db536bba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.258706] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Successfully created port: b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 912.404056] env[69475]: INFO nova.compute.manager [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Unrescuing [ 912.406731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.406731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquired lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.406731] env[69475]: DEBUG nova.network.neutron [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 912.439073] env[69475]: DEBUG nova.compute.manager [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 912.439073] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.439073] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c34568b-cd18-48c1-b457-68bbc703f3b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.447741] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.447741] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-26bcc6d7-0a02-4710-95a5-e15a789db2af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.457304] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 912.457304] env[69475]: value = "task-3508350" [ 912.457304] env[69475]: _type = "Task" [ 912.457304] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.469922] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.556965] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508349, 'name': ReconfigVM_Task, 'duration_secs': 0.652254} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.556965] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 9c27dcc3-67df-46ea-947d-b2ecdaeeb003/9c27dcc3-67df-46ea-947d-b2ecdaeeb003.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 912.557520] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6b32520-16ff-4f20-aa12-148bc128e0cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.564609] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 912.564609] env[69475]: value = "task-3508351" [ 912.564609] env[69475]: _type = "Task" [ 912.564609] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.581226] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508351, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.638834] env[69475]: DEBUG nova.scheduler.client.report [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.669720] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 912.778959] env[69475]: INFO nova.compute.manager [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Took 31.79 seconds to build instance. 
[ 912.949193] env[69475]: DEBUG nova.network.neutron [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Updating instance_info_cache with network_info: [{"id": "1176c458-2328-4179-b0d0-cbcea8175e66", "address": "fa:16:3e:3f:dc:4c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1176c458-23", "ovs_interfaceid": "1176c458-2328-4179-b0d0-cbcea8175e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.973254] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508350, 'name': PowerOffVM_Task, 'duration_secs': 0.288103} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.973703] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 912.974018] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 912.974368] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c77a92e-4121-41d6-8ac8-71842ef03e88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.049426] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 913.058391] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.058872] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.058872] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Deleting the datastore file [datastore1] 0a65565c-c679-47e5-8606-832fe3876af6 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.059232] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f25e933e-5e21-4649-bda6-8c6f76dcc90f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.074373] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for the task: (returnval){ [ 913.074373] env[69475]: value = "task-3508353" [ 913.074373] env[69475]: _type = "Task" [ 913.074373] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.078410] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508351, 'name': Rename_Task, 'duration_secs': 0.18308} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.080385] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 913.080385] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-95cde532-e694-43f4-aea7-c046fcdee3de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.088967] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508353, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.096395] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 913.096395] env[69475]: value = "task-3508354" [ 913.096395] env[69475]: _type = "Task" [ 913.096395] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.106488] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.111094] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.111094] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 913.111260] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 913.111415] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 913.111558] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 913.111703] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 913.111915] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 913.112100] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 913.112272] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 913.112445] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 913.112636] env[69475]: DEBUG nova.virt.hardware [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 913.114035] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600b1891-8c74-4823-8fb8-a923e916b936 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.122807] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2640812-f810-4fdb-9323-b5113f63332e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.146278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.160s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.150435] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.111s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.152180] env[69475]: INFO nova.compute.claims [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 913.191151] env[69475]: INFO 
nova.scheduler.client.report [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted allocations for instance d1e5e08d-b41a-4655-997d-91fbd3581f00 [ 913.284748] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bc4bb88f-e929-4238-90c8-01d3d2c11990 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.307s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.335919] env[69475]: DEBUG nova.network.neutron [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updating instance_info_cache with network_info: [{"id": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "address": "fa:16:3e:0b:b8:24", "network": {"id": "b2ee7427-b6b5-4fb8-acdf-fa1d5ecaaeb1", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-464853755-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "ef7fb53bce6145da8fe1e2f8beb57807", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "457c42cd-4ddb-4374-923e-d419b7f6eaff", "external-id": "nsx-vlan-transportzone-575", "segmentation_id": 575, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4246ca40-af", "ovs_interfaceid": "4246ca40-af00-4315-b24a-c4e3217dfdb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.453619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.458256] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Instance network_info: |[{"id": "1176c458-2328-4179-b0d0-cbcea8175e66", "address": "fa:16:3e:3f:dc:4c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1176c458-23", "ovs_interfaceid": "1176c458-2328-4179-b0d0-cbcea8175e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 913.458256] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:dc:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '72781990-3cb3-42eb-9eb1-4040dedbf66f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1176c458-2328-4179-b0d0-cbcea8175e66', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 913.467599] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.467897] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 913.468162] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c611239-7f52-4f6e-b883-bdb65004882b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.506923] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 913.506923] env[69475]: value = "task-3508355" [ 913.506923] env[69475]: _type = "Task" [ 913.506923] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.518879] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508355, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.591951] env[69475]: DEBUG oslo_vmware.api [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Task: {'id': task-3508353, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299514} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.591951] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 913.593462] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 913.593623] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 913.594067] env[69475]: INFO nova.compute.manager [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Took 1.16 seconds to destroy the instance on the hypervisor. [ 913.594155] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 913.594785] env[69475]: DEBUG nova.compute.manager [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 913.594884] env[69475]: DEBUG nova.network.neutron [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 913.613966] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508354, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.708809] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d9707a80-2c8a-4d39-acb3-e8fe4eb8a970 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "d1e5e08d-b41a-4655-997d-91fbd3581f00" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.885s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.719110] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.719110] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.825663] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 913.839893] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Releasing lock "refresh_cache-a3ee83aa-f753-49e3-9db2-b1b67d6d211e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.840893] env[69475]: DEBUG nova.objects.instance [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lazy-loading 'flavor' on Instance uuid a3ee83aa-f753-49e3-9db2-b1b67d6d211e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.018765] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508355, 'name': CreateVM_Task, 'duration_secs': 0.340027} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.019078] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 914.019990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.020485] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.020955] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 914.021636] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df700bc1-af7b-42c4-8767-69454251a5a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.028845] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 914.028845] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52079c16-4b79-52e0-b4d5-ef986f762c4f" [ 914.028845] env[69475]: _type = "Task" [ 914.028845] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.038639] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52079c16-4b79-52e0-b4d5-ef986f762c4f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.111149] env[69475]: DEBUG oslo_vmware.api [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508354, 'name': PowerOnVM_Task, 'duration_secs': 0.817686} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.111603] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 914.111903] env[69475]: INFO nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Took 10.80 seconds to spawn the instance on the hypervisor. [ 914.112208] env[69475]: DEBUG nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 914.113157] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641f2c13-1874-4883-88c5-5c8e25eb58b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.126129] env[69475]: DEBUG nova.compute.manager [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Received event network-changed-1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.126444] env[69475]: DEBUG nova.compute.manager [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Refreshing instance network info cache due to event network-changed-1176c458-2328-4179-b0d0-cbcea8175e66. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 914.126841] env[69475]: DEBUG oslo_concurrency.lockutils [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] Acquiring lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.127381] env[69475]: DEBUG oslo_concurrency.lockutils [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] Acquired lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.127715] env[69475]: DEBUG nova.network.neutron [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Refreshing network info cache for port 1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 914.222376] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 914.349113] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf89ff8-e9c4-4974-91d1-eeb431c0e7b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.376108] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 914.379073] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1124de1a-96a7-4da7-b53d-637bada5d539 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.388149] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 914.388149] env[69475]: value = "task-3508356" [ 914.388149] env[69475]: _type = "Task" [ 914.388149] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.397334] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.435393] env[69475]: DEBUG nova.compute.manager [req-46febded-563a-4f0d-9e86-825e9978ad31 req-5c8902ce-d454-4659-9474-9c23ad0b0ada service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Received event network-vif-deleted-08bb22ea-bdd1-4469-b276-1932d3bd682f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.435634] env[69475]: INFO nova.compute.manager [req-46febded-563a-4f0d-9e86-825e9978ad31 req-5c8902ce-d454-4659-9474-9c23ad0b0ada service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Neutron deleted interface 08bb22ea-bdd1-4469-b276-1932d3bd682f; detaching it from the instance and deleting it from the info cache [ 914.435795] env[69475]: DEBUG nova.network.neutron [req-46febded-563a-4f0d-9e86-825e9978ad31 req-5c8902ce-d454-4659-9474-9c23ad0b0ada service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.540769] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52079c16-4b79-52e0-b4d5-ef986f762c4f, 'name': SearchDatastore_Task, 'duration_secs': 0.01585} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.544049] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.544309] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 914.544544] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.544692] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.544875] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 914.546411] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ca1e738d-e0e3-4417-87d7-d08a08ed6bfa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.558951] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 914.559797] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 914.560375] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac6aebec-3ab3-43a7-b27c-d894850838cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.569301] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 914.569301] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524c50e5-550f-9935-8129-43d837606073" [ 914.569301] env[69475]: _type = "Task" [ 914.569301] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.577928] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524c50e5-550f-9935-8129-43d837606073, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.594357] env[69475]: DEBUG nova.network.neutron [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.610776] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449e336a-04d4-4f0e-9ee1-95a1ad7f80b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.625586] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44b5e6a-c079-4aab-93ab-b05ee738f9a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.673412] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Successfully updated port: b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 914.677656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e013e5-b4a4-4864-83f9-26a1754f1fb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.680697] env[69475]: INFO nova.compute.manager [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Took 33.30 seconds to build instance. 
[ 914.690634] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0676f896-c5d1-44be-8d50-710d64b109ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.709288] env[69475]: DEBUG nova.compute.provider_tree [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.754652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.906021] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508356, 'name': PowerOffVM_Task, 'duration_secs': 0.386352} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.906021] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 914.913777] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfiguring VM instance instance-00000045 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 914.913777] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89f2becb-81cb-4de2-bb0f-92d92dec7ca6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.930898] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 914.930898] env[69475]: value = "task-3508357" [ 914.930898] env[69475]: _type = "Task" [ 914.930898] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.931762] env[69475]: DEBUG nova.network.neutron [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Updated VIF entry in instance network info cache for port 1176c458-2328-4179-b0d0-cbcea8175e66. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 914.932118] env[69475]: DEBUG nova.network.neutron [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Updating instance_info_cache with network_info: [{"id": "1176c458-2328-4179-b0d0-cbcea8175e66", "address": "fa:16:3e:3f:dc:4c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1176c458-23", "ovs_interfaceid": "1176c458-2328-4179-b0d0-cbcea8175e66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.938131] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1ac6a36c-2a86-442f-946e-6d0ab0f963cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.944685] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508357, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.953689] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbb43c6-b88d-4b13-ac06-a4e306746e93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.992688] env[69475]: DEBUG nova.compute.manager [req-46febded-563a-4f0d-9e86-825e9978ad31 req-5c8902ce-d454-4659-9474-9c23ad0b0ada service nova] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Detach interface failed, port_id=08bb22ea-bdd1-4469-b276-1932d3bd682f, reason: Instance 0a65565c-c679-47e5-8606-832fe3876af6 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 915.053520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.053857] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.054066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.054275] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.054938] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.058040] env[69475]: INFO nova.compute.manager [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Terminating instance [ 915.081908] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524c50e5-550f-9935-8129-43d837606073, 'name': SearchDatastore_Task, 'duration_secs': 0.010072} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.082929] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-993c68aa-8631-41b6-b436-e258756bd31e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.088886] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 915.088886] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52155e56-cc69-e577-bdfd-1172a13f8074" [ 915.088886] env[69475]: _type = "Task" [ 915.088886] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.098483] env[69475]: INFO nova.compute.manager [-] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Took 1.50 seconds to deallocate network for instance. [ 915.098790] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52155e56-cc69-e577-bdfd-1172a13f8074, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.182057] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.182185] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.182340] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 915.183689] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fa4b5fe-bbfb-4f5b-90da-1c284a637d2d tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.819s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.213393] env[69475]: DEBUG nova.scheduler.client.report [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 
196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 915.300375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.300375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.438121] env[69475]: DEBUG oslo_concurrency.lockutils [req-8f791fcf-8053-4967-8a4f-99d164364406 req-ba33d19d-624e-49a7-9e1c-d7d14541f281 service nova] Releasing lock "refresh_cache-4b17d080-594b-44e7-83aa-ebe0787722d9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.443717] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508357, 'name': ReconfigVM_Task, 'duration_secs': 0.48658} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.443972] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Reconfigured VM instance instance-00000045 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 915.444293] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.444815] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a40b5a49-9aef-4a8e-b310-497cbeb9cfea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.454768] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 915.454768] env[69475]: value = "task-3508358" [ 915.454768] env[69475]: _type = "Task" [ 915.454768] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.463488] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508358, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.560836] env[69475]: DEBUG nova.compute.manager [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 915.561044] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.564501] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c4dd0f0-d540-4ed4-8597-198587ea4118 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.573484] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.575341] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-456c175d-0cd0-467a-9d12-0fe99bdcca09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.582017] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 915.582017] env[69475]: value = "task-3508359" [ 915.582017] env[69475]: _type = "Task" [ 915.582017] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.593599] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508359, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.601377] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52155e56-cc69-e577-bdfd-1172a13f8074, 'name': SearchDatastore_Task, 'duration_secs': 0.018548} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.601686] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.601954] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4b17d080-594b-44e7-83aa-ebe0787722d9/4b17d080-594b-44e7-83aa-ebe0787722d9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 915.602244] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-73fff7c6-fb66-4fb6-8317-8baa6aaa4517 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.605666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 915.608742] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 915.608742] env[69475]: value = "task-3508360" [ 915.608742] env[69475]: _type = "Task" [ 915.608742] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.619807] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.718676] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.568s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.719475] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 915.726028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.837s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.726028] env[69475]: INFO nova.compute.claims [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.728611] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 915.802866] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 915.968988] env[69475]: DEBUG oslo_vmware.api [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508358, 'name': PowerOnVM_Task, 'duration_secs': 0.504089} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.973024] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.973024] env[69475]: DEBUG nova.compute.manager [None req-3a0a4c30-51d8-4a4e-93fa-587b417603f4 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.973024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2940a394-e6d3-4bd1-82b1-d5c7e7f5596c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.099971] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508359, 'name': PowerOffVM_Task, 'duration_secs': 0.243786} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.100378] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 916.100910] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 916.101404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83e8ac69-fd48-4a71-810c-6d22d4fd48f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.124487] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508360, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.176685] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 916.177064] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 916.177343] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore1] a21ec73a-2658-4fc6-9bc1-0e492385d59e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 916.177702] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-32da28ab-b306-4d6b-97ef-9b82ea31933c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.190256] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 916.190256] env[69475]: value = "task-3508362" [ 916.190256] env[69475]: _type = "Task" [ 916.190256] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.200784] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508362, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.229795] env[69475]: DEBUG nova.compute.utils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 916.232432] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 916.232658] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 916.238106] env[69475]: DEBUG nova.network.neutron [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updating instance_info_cache with network_info: [{"id": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "address": "fa:16:3e:c4:73:07", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12c0816-a1", "ovs_interfaceid": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.331562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "8cc0636c-84af-4f68-bec8-1493b421a605" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.331971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.332897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.332897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.332897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.337565] env[69475]: INFO nova.compute.manager [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Terminating instance [ 916.339864] env[69475]: DEBUG nova.policy [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d4323c195b24245a75109e165f900f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6dd9c026624896ae4de7fab35720d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 916.345022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.627348] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600535} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.632067] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4b17d080-594b-44e7-83aa-ebe0787722d9/4b17d080-594b-44e7-83aa-ebe0787722d9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 916.632410] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 916.633740] env[69475]: DEBUG nova.compute.manager [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received event network-vif-plugged-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.633932] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.634160] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.634330] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.634491] env[69475]: DEBUG nova.compute.manager [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] No waiting events found dispatching network-vif-plugged-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 916.634652] env[69475]: WARNING nova.compute.manager [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received unexpected event network-vif-plugged-b12c0816-a102-4eeb-b8bd-bc03636b077f for instance with vm_state building and task_state spawning. 
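
The CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries above all follow the same shape: an API call returns a task handle, and the caller polls it, logging "progress is N%" until the task completes and a duration_secs is reported. Below is a minimal, self-contained sketch of that poll loop; TaskInfo, FakeTask and poll_task are illustrative stand-ins for this log only, not oslo.vmware's real wait_for_task API.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str       # 'running', 'success' or 'error'
    progress: int    # percent complete

class FakeTask:
    """Stand-in for a vCenter task handle; advances one step per poll."""
    def __init__(self, steps=(0, 51, 100)):
        self._steps = list(steps)

    def info(self) -> TaskInfo:
        progress = self._steps.pop(0) if self._steps else 100
        return TaskInfo(state='success' if progress >= 100 else 'running',
                        progress=progress)

def poll_task(task: FakeTask, interval: float = 0.05) -> float:
    """Poll until the task leaves 'running'; return elapsed seconds."""
    start = time.monotonic()
    while True:
        info = task.info()
        if info.state != 'running':
            return time.monotonic() - start
        print(f"progress is {info.progress}%")  # mirrors the DEBUG lines above
        time.sleep(interval)

duration = poll_task(FakeTask())
print(f"completed successfully, duration_secs={duration:.3f}")

The real service logs the same sequence for every task kind seen here (CopyVirtualDisk, ExtendVirtualDisk, ReconfigVM, PowerOnVM, DeleteDatastoreFile): a 0% entry on the first poll, intermediate percentages, then a completion entry carrying the measured duration.
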
[ 916.635011] env[69475]: DEBUG nova.compute.manager [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received event network-changed-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 916.635011] env[69475]: DEBUG nova.compute.manager [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Refreshing instance network info cache due to event network-changed-b12c0816-a102-4eeb-b8bd-bc03636b077f. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 916.635331] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Acquiring lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.635497] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cdb6b71e-bee0-4cdc-900c-0636b86e49c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.644137] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 916.644137] env[69475]: value = "task-3508363" [ 916.644137] env[69475]: _type = "Task" [ 916.644137] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.653970] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508363, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.702906] env[69475]: DEBUG oslo_vmware.api [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508362, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.44704} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.703365] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.703752] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.705017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.705017] env[69475]: INFO nova.compute.manager [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 916.705017] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.705017] env[69475]: DEBUG nova.compute.manager [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.705017] env[69475]: DEBUG nova.network.neutron [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.743239] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 916.746808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.747318] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance network_info: |[{"id": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "address": "fa:16:3e:c4:73:07", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12c0816-a1", "ovs_interfaceid": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 916.748264] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Acquired lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.748578] env[69475]: DEBUG nova.network.neutron [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Refreshing network info cache for port b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 916.750468] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c4:73:07', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b12c0816-a102-4eeb-b8bd-bc03636b077f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 916.758590] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] 
Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.760409] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 916.761065] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5a61471-2353-4b6d-9b2d-628f27acdcf3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.795646] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 916.795646] env[69475]: value = "task-3508364" [ 916.795646] env[69475]: _type = "Task" [ 916.795646] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.813896] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508364, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.845599] env[69475]: DEBUG nova.compute.manager [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 916.845936] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2ed7ff20-13b3-467e-8850-461baf2d700c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.855597] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad0ac2e-c84e-4dbb-91bd-8bd83a420936 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.893959] env[69475]: WARNING nova.virt.vmwareapi.driver [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 8cc0636c-84af-4f68-bec8-1493b421a605 could not be found. 
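
The WARNING above shows the delete path for instance 8cc0636c-84af-4f68-bec8-1493b421a605: the backend VM is already gone, so the driver records the InstanceNotFound and carries on with the remaining cleanup (destroy bookkeeping, then network deallocation) rather than failing the terminate. A rough sketch of that idempotent-delete pattern follows; BackendNotFound, lookup_vm, deallocate_network and the other helpers are hypothetical names, not Nova's actual functions.

class BackendNotFound(Exception):
    pass

def lookup_vm(uuid: str):
    # Stand-in for the UUID search (FindAllByUuid in the log); pretend the
    # VM has already been removed from the hypervisor.
    raise BackendNotFound(uuid)

def destroy_instance(uuid: str) -> None:
    try:
        vm_ref = lookup_vm(uuid)
    except BackendNotFound:
        # Same spirit as the WARNING above: note it and fall through to the
        # remaining cleanup instead of aborting the delete.
        print(f"WARNING: instance {uuid} not found on backend; "
              "continuing with cleanup")
    else:
        power_off_and_unregister(vm_ref)   # not reached in this sketch
    deallocate_network(uuid)

def power_off_and_unregister(vm_ref) -> None:
    print(f"powering off and unregistering {vm_ref}")

def deallocate_network(uuid: str) -> None:
    print(f"deallocating network for instance {uuid}")

destroy_instance("8cc0636c-84af-4f68-bec8-1493b421a605")

Treating "not found" as already deleted keeps instance termination idempotent, which matters when an earlier delete attempt was interrupted after the VM had been unregistered but before the rest of the cleanup ran.
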
[ 916.894019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.897794] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cdd95a07-8e7d-4c59-b536-c657a36da3a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.908558] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cda662f-ece5-4c11-afcc-118c692fc9cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.952877] env[69475]: WARNING nova.virt.vmwareapi.vmops [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8cc0636c-84af-4f68-bec8-1493b421a605 could not be found. [ 916.953159] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.953309] env[69475]: INFO nova.compute.manager [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Took 0.11 seconds to destroy the instance on the hypervisor. [ 916.953568] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 916.957638] env[69475]: DEBUG nova.compute.manager [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 916.957787] env[69475]: DEBUG nova.network.neutron [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 916.982542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.982830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.984106] env[69475]: INFO nova.compute.manager [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Shelving [ 917.024424] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Successfully created port: 9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 917.158456] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508363, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069298} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.158743] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 917.159945] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc86984-91fa-4175-a310-c74ba196f78c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.197291] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 4b17d080-594b-44e7-83aa-ebe0787722d9/4b17d080-594b-44e7-83aa-ebe0787722d9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 917.200535] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e98dcfc8-727b-4424-9729-29080b91df47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.223502] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 917.223502] env[69475]: value = "task-3508365" [ 917.223502] env[69475]: _type = "Task" [ 917.223502] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.235371] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508365, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.314321] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508364, 'name': CreateVM_Task, 'duration_secs': 0.497538} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.314666] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.315182] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.315386] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.315672] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 917.315929] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cca1c68-14a6-40b5-9aae-99ae21dacb34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.323155] env[69475]: DEBUG nova.compute.manager [req-659cbd8a-94fd-4ba1-b87c-c09177866477 req-229c65f9-5691-442a-b29a-face008e9cb5 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Received event network-vif-deleted-65a50486-30b1-4098-94d5-abba26c7c25b {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 917.323275] env[69475]: INFO nova.compute.manager [req-659cbd8a-94fd-4ba1-b87c-c09177866477 req-229c65f9-5691-442a-b29a-face008e9cb5 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Neutron deleted interface 65a50486-30b1-4098-94d5-abba26c7c25b; detaching it from the instance and deleting it from the info cache [ 917.323438] env[69475]: DEBUG nova.network.neutron [req-659cbd8a-94fd-4ba1-b87c-c09177866477 req-229c65f9-5691-442a-b29a-face008e9cb5 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.328618] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 917.328618] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d58383-2a32-6432-e9ac-5400c2b5f398" [ 917.328618] env[69475]: _type = "Task" [ 917.328618] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.339440] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d58383-2a32-6432-e9ac-5400c2b5f398, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.343942] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b8a71f6-03b2-4dfe-82b2-aca74afe79fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.351211] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10b24fed-39e2-4f8e-9548-cd2cb5d98c35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.386917] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7843206-8e07-4e70-866b-3f5fea218450 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.397264] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-980d051a-5259-439f-91fe-f68e54f73f97 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.415674] env[69475]: DEBUG nova.compute.provider_tree [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.662289] env[69475]: DEBUG nova.network.neutron [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updated VIF entry in instance network info cache for port b12c0816-a102-4eeb-b8bd-bc03636b077f. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 917.662735] env[69475]: DEBUG nova.network.neutron [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updating instance_info_cache with network_info: [{"id": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "address": "fa:16:3e:c4:73:07", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12c0816-a1", "ovs_interfaceid": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.733541] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508365, 'name': ReconfigVM_Task, 'duration_secs': 0.354794} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.737015] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 4b17d080-594b-44e7-83aa-ebe0787722d9/4b17d080-594b-44e7-83aa-ebe0787722d9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 917.737015] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b786b90d-689e-4f1a-9ed4-fbdc0be39040 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.742226] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 917.742226] env[69475]: value = "task-3508366" [ 917.742226] env[69475]: _type = "Task" [ 917.742226] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.751501] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508366, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.760852] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 917.798218] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='79d941e1d0397bcaed61a26449e7d891',container_format='bare',created_at=2025-04-22T09:40:19Z,direct_url=,disk_format='vmdk',id=04a88c3d-f91d-41ae-b78d-8f3d116adc4c,min_disk=1,min_ram=0,name='tempest-test-snap-1200471503',owner='4e6dd9c026624896ae4de7fab35720d8',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-04-22T09:40:34Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 917.798476] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 917.798635] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 917.798816] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 917.798961] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 917.799205] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 917.799433] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 917.799593] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 
tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 917.799759] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 917.800494] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 917.800494] env[69475]: DEBUG nova.virt.hardware [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 917.800494] env[69475]: DEBUG nova.network.neutron [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.805635] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43afc9c6-9a90-4fd0-971e-945fa5f4e221 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.812091] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcd91622-babb-4967-afbd-ac64f857e9ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.828292] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-352d103e-645f-4a11-a560-fc8f708b75a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.839862] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d58383-2a32-6432-e9ac-5400c2b5f398, 'name': SearchDatastore_Task, 'duration_secs': 0.013845} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.841183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.841440] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 917.841668] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.841830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 917.842105] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 917.842688] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ed05601-7884-4637-9b71-12e97e9e388b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.847182] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46c049b-f33d-4a17-9821-6f8dd9167f54 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.886230] env[69475]: DEBUG nova.compute.manager [req-659cbd8a-94fd-4ba1-b87c-c09177866477 req-229c65f9-5691-442a-b29a-face008e9cb5 service nova] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Detach interface failed, port_id=65a50486-30b1-4098-94d5-abba26c7c25b, reason: Instance a21ec73a-2658-4fc6-9bc1-0e492385d59e could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 917.887900] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 917.888212] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 917.889017] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b76bf58-c457-48de-99c1-3a80d7330510 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.894414] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 917.894414] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e1531-cfb9-0073-334a-cb44799eb4cd" [ 917.894414] env[69475]: _type = "Task" [ 917.894414] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.903350] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e1531-cfb9-0073-334a-cb44799eb4cd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.909090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.909090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.909090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.909090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.909090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.910527] env[69475]: INFO nova.compute.manager [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Terminating instance [ 917.921794] env[69475]: DEBUG nova.scheduler.client.report [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.991579] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.991911] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99763c85-9281-4911-981d-ee86302b3dd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.998621] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 917.998621] env[69475]: value = "task-3508367" [ 917.998621] env[69475]: _type = "Task" [ 917.998621] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.009629] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.166365] env[69475]: DEBUG oslo_concurrency.lockutils [req-7fc4ccab-7f1a-4ed1-9d8d-92f5ddf7b985 req-5b3e6a1b-74ec-4cf7-9581-c850ad3d023f service nova] Releasing lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.252817] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508366, 'name': Rename_Task, 'duration_secs': 0.144713} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.257264] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 918.257872] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-be1a2cda-24e2-4a65-ab04-edec07ba1415 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.264893] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 918.264893] env[69475]: value = "task-3508368" [ 918.264893] env[69475]: _type = "Task" [ 918.264893] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.274498] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508368, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.306281] env[69475]: INFO nova.compute.manager [-] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Took 1.60 seconds to deallocate network for instance. [ 918.408520] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e1531-cfb9-0073-334a-cb44799eb4cd, 'name': SearchDatastore_Task, 'duration_secs': 0.026495} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.408721] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1447f20-d029-44e3-b22d-37e24422096f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.414853] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 918.414853] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5245ccd4-88a7-506c-862c-889b975443ff" [ 918.414853] env[69475]: _type = "Task" [ 918.414853] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.419557] env[69475]: DEBUG nova.compute.manager [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 918.419557] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 918.421673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b9cc3be-bb4b-47ee-abaf-ce549b6257bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.427279] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.704s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.427857] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 918.436420] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.858s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.436815] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.439244] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 13.850s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.440683] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5245ccd4-88a7-506c-862c-889b975443ff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.445255] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 918.445255] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d5403a0-c3a4-4289-98f7-ccfbd4a4fcb5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.451298] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 918.451298] env[69475]: value = "task-3508369" [ 918.451298] env[69475]: _type = "Task" [ 918.451298] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.460876] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508369, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.476872] env[69475]: INFO nova.scheduler.client.report [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocations for instance 86464a01-e034-43b6-a6d5-45f9e3b6715b [ 918.512716] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508367, 'name': PowerOffVM_Task, 'duration_secs': 0.284844} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.512716] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.512716] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32e6150-7109-4a43-8dac-8f865547eba4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.534112] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe41240-023d-447d-849e-70771e50e211 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.574735] env[69475]: DEBUG nova.network.neutron [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.679572] env[69475]: DEBUG nova.compute.manager [req-58a3300d-45df-4eb3-b06a-045c5cb32f40 req-4dec18a3-1778-41a8-a2fb-f715fe0fd9f3 service nova] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Received event network-vif-deleted-3041b80e-1b4f-454f-92b6-d002b52423b5 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 918.778617] env[69475]: DEBUG oslo_vmware.api [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508368, 'name': PowerOnVM_Task, 'duration_secs': 0.50695} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.778617] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 918.778617] env[69475]: INFO nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Took 10.58 seconds to spawn the instance on the hypervisor. 
[ 918.778617] env[69475]: DEBUG nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 918.778617] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f650b87-127e-418f-acb1-0448ce0246ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.817258] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.931378] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5245ccd4-88a7-506c-862c-889b975443ff, 'name': SearchDatastore_Task, 'duration_secs': 0.016823} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.931999] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 918.934019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 78430e6a-b0a3-400b-91c4-effea838274a/78430e6a-b0a3-400b-91c4-effea838274a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 918.934019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-724cc3fb-d61f-43e9-893a-6614c23d3cd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.944089] env[69475]: DEBUG nova.compute.utils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 918.949730] env[69475]: INFO nova.compute.claims [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.954057] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] 
Waiting for the task: (returnval){ [ 918.954057] env[69475]: value = "task-3508370" [ 918.954057] env[69475]: _type = "Task" [ 918.954057] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.955120] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.956793] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.972890] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508369, 'name': PowerOffVM_Task, 'duration_secs': 0.172515} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.976328] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.976740] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 918.977146] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508370, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.979461] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-56236a28-7856-4327-b2b0-9bd976aacf37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.984820] env[69475]: DEBUG oslo_concurrency.lockutils [None req-02d89329-d524-4d64-a0b2-0ed5ca1480b1 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "86464a01-e034-43b6-a6d5-45f9e3b6715b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.311s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.046551] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 919.047309] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-77cef5e5-2fb0-4047-b93a-475c587fa31f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.057344] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 919.057344] env[69475]: value = "task-3508372" [ 919.057344] env[69475]: _type = "Task" [ 919.057344] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.070317] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508372, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.080894] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.080894] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.080894] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleting the datastore file [datastore1] a3ee83aa-f753-49e3-9db2-b1b67d6d211e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.080894] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-58a6d403-9a1d-4939-a21a-043bf3731cbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.086028] env[69475]: INFO nova.compute.manager [-] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Took 2.13 seconds to deallocate network for instance. [ 919.093017] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 919.093017] env[69475]: value = "task-3508373" [ 919.093017] env[69475]: _type = "Task" [ 919.093017] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.100728] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508373, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.131566] env[69475]: DEBUG nova.policy [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82f6c3724a2b4430b8df87655ff91c63', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1073981d0d7740e78805798e02ff9d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.306171] env[69475]: INFO nova.compute.manager [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Took 36.86 seconds to build instance. 
[ 919.391372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 919.391372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.457034] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 919.463561] env[69475]: INFO nova.compute.resource_tracker [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating resource usage from migration 5f455cf6-0177-44bd-8ddf-bf490d77efd6 [ 919.488897] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508370, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.578774] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508372, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.606563] env[69475]: DEBUG oslo_vmware.api [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508373, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.282279} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.607571] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 919.607917] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 919.608157] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 919.608344] env[69475]: INFO nova.compute.manager [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 919.608588] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 919.608782] env[69475]: DEBUG nova.compute.manager [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 919.608878] env[69475]: DEBUG nova.network.neutron [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 919.654613] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Successfully updated port: 9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 919.662794] env[69475]: INFO nova.compute.manager [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Took 0.58 seconds to detach 1 volumes for instance. 
[ 919.667611] env[69475]: DEBUG nova.compute.manager [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Deleting volume: 44671911-bc3c-459e-8572-d2ff086a0071 {{(pid=69475) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 919.810708] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d119baa5-87d5-4263-9b30-21b5657b8506 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.373s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.893376] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 919.966473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1c1a72-6ee5-4bcb-b5b7-14be5a8aea86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.984317] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb88a8f5-b336-4dbc-b55c-ce41aec1f0be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.987658] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508370, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602247} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.987943] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 78430e6a-b0a3-400b-91c4-effea838274a/78430e6a-b0a3-400b-91c4-effea838274a.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 919.988557] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 919.988856] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ea4f6f70-2b7e-416e-af4d-2bc357e8486f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.018771] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-832ac284-efdc-43cf-ba4c-c6ebd3577351 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.022857] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 920.022857] env[69475]: value = "task-3508375" [ 920.022857] env[69475]: _type = "Task" [ 920.022857] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.030502] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adbef4b6-2f22-4a0b-b24f-dbb09d9e2732 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.037978] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508375, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.048690] env[69475]: DEBUG nova.compute.provider_tree [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 920.073484] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508372, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.158241] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 920.158393] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 920.158548] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 920.223226] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.421705] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Successfully created port: afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.424808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.478784] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 920.514497] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 920.516252] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 920.516473] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 920.516687] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 920.516838] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 920.517015] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 920.517265] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 920.517458] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 920.519452] env[69475]: DEBUG 
nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 920.519739] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 920.519963] env[69475]: DEBUG nova.virt.hardware [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 920.521286] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b29e82-98bd-483b-8db9-858ced2f81f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.539140] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6be4e2-339f-4cc2-a001-e5a04c21c7e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.543429] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072042} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.543719] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 920.544958] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd28bf7d-d101-48f7-9753-e315ba2b443f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.558155] env[69475]: DEBUG nova.scheduler.client.report [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 920.585014] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Reconfiguring VM instance instance-0000004c to attach disk [datastore2] 78430e6a-b0a3-400b-91c4-effea838274a/78430e6a-b0a3-400b-91c4-effea838274a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 920.586790] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86c720b2-e717-4609-a457-3f6be3e7eb4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.604901] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508372, 'name': CreateSnapshot_Task, 'duration_secs': 1.071572} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.605568] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 920.606325] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42108070-265d-4f12-b334-1f4f14ddef15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.610756] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 920.610756] env[69475]: value = "task-3508376" [ 920.610756] env[69475]: _type = "Task" [ 920.610756] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.626407] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508376, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.701026] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 920.880600] env[69475]: DEBUG nova.network.neutron [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Updating instance_info_cache with network_info: [{"id": "9aa71b8b-3116-4297-a480-30aa5caf507d", "address": "fa:16:3e:ea:34:2a", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa71b8b-31", "ovs_interfaceid": "9aa71b8b-3116-4297-a480-30aa5caf507d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.910278] env[69475]: DEBUG nova.network.neutron [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.019177] env[69475]: DEBUG nova.compute.manager [req-cfd31a0a-e692-40eb-975a-74d6c49ca86f req-344dc6e8-45f7-4164-b222-53c550d5c198 service nova] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Received event network-vif-deleted-4246ca40-af00-4315-b24a-c4e3217dfdb2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.065962] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.627s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.066199] env[69475]: INFO nova.compute.manager [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Migrating [ 921.072849] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.624s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.074295] env[69475]: INFO nova.compute.claims [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 
tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 921.128096] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 921.128096] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-20394a99-05d1-463f-a79e-2d4294212f70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.134815] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508376, 'name': ReconfigVM_Task, 'duration_secs': 0.308585} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.135458] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Reconfigured VM instance instance-0000004c to attach disk [datastore2] 78430e6a-b0a3-400b-91c4-effea838274a/78430e6a-b0a3-400b-91c4-effea838274a.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 921.136542] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7001d88-6309-432a-ba36-1a34266919ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.140150] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 921.140150] env[69475]: value = "task-3508377" [ 921.140150] env[69475]: _type = "Task" [ 921.140150] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.144532] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 921.144532] env[69475]: value = "task-3508378" [ 921.144532] env[69475]: _type = "Task" [ 921.144532] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.152244] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508377, 'name': CloneVM_Task} progress is 0%. 
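[editorial note] The instance_info_cache updates earlier in this section carry the full network_info structure (port id, MAC address, subnets, fixed IPs, OVS binding details) as a JSON-like list. As a reading aid, here is a small sketch that pulls the addresses out of that structure; the sample data is abbreviated from the entry logged for port 9aa71b8b-3116-4297-a480-30aa5caf507d, and the helper is not part of Nova.

    # Minimal sketch: extract MAC and fixed IPs from a network_info entry of
    # the shape shown in the log (abbreviated to the fields used here).
    network_info = [{
        "id": "9aa71b8b-3116-4297-a480-30aa5caf507d",
        "address": "fa:16:3e:ea:34:2a",
        "network": {
            "label": "tempest-ImagesTestJSON-83616973-network",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1"},
                "ips": [{"address": "192.168.128.10", "type": "fixed"}],
            }],
        },
        "devname": "tap9aa71b8b-31",
    }]

    def fixed_ips(vif):
        """Yield (cidr, address) pairs for every fixed IP on a VIF."""
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip.get("type") == "fixed":
                    yield subnet["cidr"], ip["address"]

    for vif in network_info:
        print(vif["devname"], vif["address"], list(fixed_ips(vif)))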
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.156470] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508378, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.317488] env[69475]: DEBUG nova.compute.manager [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Received event network-vif-plugged-9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.318405] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Acquiring lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.318939] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.318939] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.318939] env[69475]: DEBUG nova.compute.manager [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] No waiting events found dispatching network-vif-plugged-9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.319413] env[69475]: WARNING nova.compute.manager [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Received unexpected event network-vif-plugged-9aa71b8b-3116-4297-a480-30aa5caf507d for instance with vm_state building and task_state spawning. [ 921.319413] env[69475]: DEBUG nova.compute.manager [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Received event network-changed-9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.319413] env[69475]: DEBUG nova.compute.manager [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Refreshing instance network info cache due to event network-changed-9aa71b8b-3116-4297-a480-30aa5caf507d. 
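[editorial note] The network-vif-plugged handling above follows a recognisable pattern: take a short per-instance lock, look for a waiter registered for that event, and either wake it or record the event as unexpected (which is what happens here, because the instance is still building). The sketch below illustrates that pattern with plain threading primitives; it is a simplified stand-in, not Nova's InstanceEvents implementation.

    import threading

    class EventWaiters:
        """Toy event registry: waiters register by (instance, event) key."""

        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = {}  # (instance_id, event_name) -> threading.Event

        def prepare(self, instance_id, event_name):
            ev = threading.Event()
            with self._lock:
                self._waiters[(instance_id, event_name)] = ev
            return ev

        def pop_and_dispatch(self, instance_id, event_name):
            with self._lock:
                ev = self._waiters.pop((instance_id, event_name), None)
            if ev is None:
                # Mirrors "No waiting events found dispatching ..." in the log.
                print(f"unexpected event {event_name} for {instance_id}")
                return False
            ev.set()
            return True

    waiters = EventWaiters()
    waiters.pop_and_dispatch("2c9b3182", "network-vif-plugged")    # unexpected
    ev = waiters.prepare("2c9b3182", "network-vif-plugged")
    waiters.pop_and_dispatch("2c9b3182", "network-vif-plugged")    # wakes waiter
    print("woken:", ev.is_set())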
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 921.319568] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Acquiring lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.388955] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 921.389322] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Instance network_info: |[{"id": "9aa71b8b-3116-4297-a480-30aa5caf507d", "address": "fa:16:3e:ea:34:2a", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa71b8b-31", "ovs_interfaceid": "9aa71b8b-3116-4297-a480-30aa5caf507d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 921.389628] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Acquired lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.389801] env[69475]: DEBUG nova.network.neutron [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Refreshing network info cache for port 9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.391532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:34:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa71b8b-3116-4297-a480-30aa5caf507d', 'vif_model': 'vmxnet3'}] {{(pid=69475) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 921.400361] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 921.400681] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 921.400956] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb3c2573-dcb6-4f2b-bb7b-de6a33eb8ec0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.417995] env[69475]: INFO nova.compute.manager [-] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Took 1.81 seconds to deallocate network for instance. [ 921.424113] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 921.424113] env[69475]: value = "task-3508379" [ 921.424113] env[69475]: _type = "Task" [ 921.424113] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.434076] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508379, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.591165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.591392] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.591619] env[69475]: DEBUG nova.network.neutron [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 921.659681] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508377, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.663625] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508378, 'name': Rename_Task, 'duration_secs': 0.161212} completed successfully. 
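[editorial note] Throughout this section the compute manager serialises work on shared state by acquiring and releasing named locks such as "refresh_cache-<uuid>" and "compute_resources" (the "acquired by" / "released by" lines, produced by oslo.concurrency's lockutils). A minimal sketch of the named-lock idea, using only the standard library rather than oslo.concurrency, might look like this.

    import threading
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)   # one lock object per lock name
    _registry_guard = threading.Lock()

    @contextmanager
    def named_lock(name):
        """Serialise callers that use the same lock name."""
        with _registry_guard:
            lock = _locks[name]
        lock.acquire()
        try:
            yield
        finally:
            lock.release()

    # Usage mirroring the log: only one thread at a time may refresh the
    # network info cache for a given instance.
    with named_lock("refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc"):
        pass  # rebuild the cache here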
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.663959] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 921.664247] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44eb6941-a596-42c1-b8bc-f4b7ec2f15cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.670728] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 921.670728] env[69475]: value = "task-3508380" [ 921.670728] env[69475]: _type = "Task" [ 921.670728] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.679484] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508380, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.929719] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.937377] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508379, 'name': CreateVM_Task, 'duration_secs': 0.454398} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.937751] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 921.938852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.938852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.940192] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 921.941403] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86f63f3e-12c8-427b-b669-43669fd667d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.948321] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 921.948321] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1b925-4ea9-c46e-36ac-9debaeee1ced" [ 921.948321] env[69475]: _type = "Task" [ 921.948321] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.961285] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b1b925-4ea9-c46e-36ac-9debaeee1ced, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.168064] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508377, 'name': CloneVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.197162] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508380, 'name': PowerOnVM_Task} progress is 89%. 
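[editorial note] The spawn path above takes a lock on the cached image path ("[datastore1] devstack-image-cache_base/<image-id>") and then searches the datastore before deciding whether the image must be fetched ("Processing image ...", "Preparing fetch location" further down). Reduced to a local-filesystem analogy with a single lock (the real driver locks per image path and streams into the datastore), the check-then-fetch-under-a-lock pattern looks roughly like this; the fetch callable is hypothetical.

    import os
    import threading
    from contextlib import contextmanager

    _cache_lock = threading.Lock()

    @contextmanager
    def image_cache_lock():
        with _cache_lock:
            yield

    def ensure_cached(cache_dir, image_id, fetch):
        """Return the cached image path, downloading it first if missing."""
        path = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with image_cache_lock():
            if os.path.exists(path):                       # "SearchDatastore" step
                return path
            os.makedirs(os.path.dirname(path), exist_ok=True)  # "MakeDirectory" step
            fetch(path)                                    # download only on a miss
        return path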
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.244065] env[69475]: DEBUG nova.network.neutron [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Updated VIF entry in instance network info cache for port 9aa71b8b-3116-4297-a480-30aa5caf507d. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 922.244065] env[69475]: DEBUG nova.network.neutron [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Updating instance_info_cache with network_info: [{"id": "9aa71b8b-3116-4297-a480-30aa5caf507d", "address": "fa:16:3e:ea:34:2a", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa71b8b-31", "ovs_interfaceid": "9aa71b8b-3116-4297-a480-30aa5caf507d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 922.466999] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.467120] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Processing image 04a88c3d-f91d-41ae-b78d-8f3d116adc4c {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 922.467439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.467686] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.467979] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 922.468381] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-81f6545e-b8ad-4846-b9e8-a5c487943efa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.482078] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 922.482391] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 922.483780] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16faa6c6-6512-4829-9cb2-139ca37136cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.490792] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 922.490792] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d60c92-5cad-8263-c629-26a48507e55b" [ 922.490792] env[69475]: _type = "Task" [ 922.490792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.507512] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d60c92-5cad-8263-c629-26a48507e55b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.605019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a0765a5-a827-42bc-891b-dc2b06b04d87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.616445] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa2ec2d-324e-46d2-b9c4-ef0453d76807 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.653589] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db64cbae-7863-4a95-a7ca-d15c26609fe1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.670037] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508377, 'name': CloneVM_Task, 'duration_secs': 1.110789} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.673283] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Created linked-clone VM from snapshot [ 922.674264] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eef573e-e060-4d9c-8611-d8e9de0023ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.681121] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56d95613-cadd-4d5b-a5d6-db2a893df742 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.692240] env[69475]: DEBUG oslo_vmware.api [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508380, 'name': PowerOnVM_Task, 'duration_secs': 0.621442} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.703370] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 922.703757] env[69475]: INFO nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Took 9.65 seconds to spawn the instance on the hypervisor. 
[ 922.703757] env[69475]: DEBUG nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 922.704173] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Uploading image 079770cf-a859-4f7a-ae7c-ef25478face9 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 922.706396] env[69475]: DEBUG nova.compute.provider_tree [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.709735] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c6f8590-d64b-41be-9166-999f34640b81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.717941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 922.717941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 922.746206] env[69475]: DEBUG oslo_concurrency.lockutils [req-c154ffa5-b825-4e90-acc0-fac12d609b77 req-ba21e4dc-39ea-42f9-a1ad-e75bd38aa2fb service nova] Releasing lock "refresh_cache-2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.751076] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 922.751076] env[69475]: value = "vm-701033" [ 922.751076] env[69475]: _type = "VirtualMachine" [ 922.751076] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 922.751353] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d08a8d16-c5e7-42ad-96e2-745f956e2975 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.766208] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease: (returnval){ [ 922.766208] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524ab7ca-834a-3e3e-d288-f1e61ba3ec52" [ 922.766208] env[69475]: _type = "HttpNfcLease" [ 922.766208] env[69475]: } obtained for exporting VM: (result){ [ 922.766208] env[69475]: value = "vm-701033" [ 922.766208] env[69475]: _type = "VirtualMachine" [ 922.766208] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 922.766610] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the lease: (returnval){ [ 922.766610] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524ab7ca-834a-3e3e-d288-f1e61ba3ec52" [ 922.766610] env[69475]: _type = "HttpNfcLease" [ 922.766610] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 922.773094] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 922.773094] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524ab7ca-834a-3e3e-d288-f1e61ba3ec52" [ 922.773094] env[69475]: _type = "HttpNfcLease" [ 922.773094] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 922.921588] env[69475]: DEBUG nova.network.neutron [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.008346] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 923.009240] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Fetch image to [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766/OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 923.009240] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Downloading stream optimized image 04a88c3d-f91d-41ae-b78d-8f3d116adc4c to [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766/OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766.vmdk on the data store datastore1 as vApp {{(pid=69475) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 923.009240] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Downloading image file data 04a88c3d-f91d-41ae-b78d-8f3d116adc4c to the ESX as VM named 'OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766' {{(pid=69475) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 923.111348] env[69475]: DEBUG oslo_vmware.rw_handles [None 
req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 923.111348] env[69475]: value = "resgroup-9" [ 923.111348] env[69475]: _type = "ResourcePool" [ 923.111348] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 923.111651] env[69475]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-abb3a9b7-2d2a-43ef-a372-e43f9ef4a086 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.137270] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease: (returnval){ [ 923.137270] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 923.137270] env[69475]: _type = "HttpNfcLease" [ 923.137270] env[69475]: } obtained for vApp import into resource pool (val){ [ 923.137270] env[69475]: value = "resgroup-9" [ 923.137270] env[69475]: _type = "ResourcePool" [ 923.137270] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 923.137587] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the lease: (returnval){ [ 923.137587] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 923.137587] env[69475]: _type = "HttpNfcLease" [ 923.137587] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 923.144815] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 923.144815] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 923.144815] env[69475]: _type = "HttpNfcLease" [ 923.144815] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 923.199094] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Successfully updated port: afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 923.218135] env[69475]: DEBUG nova.scheduler.client.report [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 923.222540] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 923.247399] env[69475]: INFO nova.compute.manager [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Took 35.86 seconds to build instance. [ 923.276427] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 923.276427] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524ab7ca-834a-3e3e-d288-f1e61ba3ec52" [ 923.276427] env[69475]: _type = "HttpNfcLease" [ 923.276427] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 923.276843] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 923.276843] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524ab7ca-834a-3e3e-d288-f1e61ba3ec52" [ 923.276843] env[69475]: _type = "HttpNfcLease" [ 923.276843] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 923.277473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfcdcd77-261b-4564-a4a8-345d54254c92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.286645] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk from lease info. 
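[editorial note] Once the export lease is ready, the handler resolves a VMDK URL from the lease info and opens it for reading (the "Found VMDK URL" / "Opening URL" lines above). As a rough illustration of consuming such a URL, here is a chunked HTTPS download using the requests library; the URL, TLS verification, and any session or authentication handling are placeholders, and this is not oslo.vmware's rw_handles code.

    import requests

    def download_vmdk(url, dest_path, chunk_size=1024 * 1024, verify=True):
        """Stream a disk export URL to a local file in fixed-size chunks."""
        with requests.get(url, stream=True, verify=verify, timeout=60) as resp:
            resp.raise_for_status()
            total = 0
            with open(dest_path, "wb") as out:
                for chunk in resp.iter_content(chunk_size=chunk_size):
                    if chunk:
                        out.write(chunk)
                        total += len(chunk)
        return total

    # Hypothetical usage, matching the shape of the URL seen in the log:
    # download_vmdk("https://esx-host.example/nfc/<lease-id>/disk-0.vmdk",
    #               "/tmp/disk-0.vmdk", verify=False)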
{{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 923.286930] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 923.426036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.433024] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-0d33aae2-2fb5-436d-991d-4a3ca175eae4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.647943] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 923.647943] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 923.647943] env[69475]: _type = "HttpNfcLease" [ 923.647943] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 923.707321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.707518] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.707940] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 923.715805] env[69475]: DEBUG nova.compute.manager [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Received event network-vif-plugged-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.715950] env[69475]: DEBUG oslo_concurrency.lockutils [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.716222] env[69475]: DEBUG oslo_concurrency.lockutils 
[req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.716437] env[69475]: DEBUG oslo_concurrency.lockutils [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.716643] env[69475]: DEBUG nova.compute.manager [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] No waiting events found dispatching network-vif-plugged-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 923.717240] env[69475]: WARNING nova.compute.manager [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Received unexpected event network-vif-plugged-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 for instance with vm_state building and task_state spawning. [ 923.718146] env[69475]: DEBUG nova.compute.manager [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Received event network-changed-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.718836] env[69475]: DEBUG nova.compute.manager [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Refreshing instance network info cache due to event network-changed-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 923.718836] env[69475]: DEBUG oslo_concurrency.lockutils [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Acquiring lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.730009] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.656s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.730009] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 923.736020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.833s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.736020] env[69475]: INFO nova.compute.claims [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 923.749830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e98fded-01dd-4e44-828e-b91a8ca5555d tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.371s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.764447] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.148887] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.148887] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 924.148887] env[69475]: _type = "HttpNfcLease" [ 924.148887] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 924.239446] env[69475]: DEBUG nova.compute.utils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 924.246098] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 924.246257] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 924.316787] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Instance cache missing network info. 
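[editorial note] The "Using /dev/sd instead of None" line above comes from the helper that picks the next free block-device name while building block device mappings. A toy version of that naming logic (prefix plus the first unused letter) could be written as follows; it is a simplification for illustration, not nova.compute.utils.get_next_device_name, and it ignores multi-letter suffixes.

    import string

    def next_device_name(used, prefix="/dev/sd"):
        """Return the first /dev/sdX name not already in `used`."""
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError("no free device names left")

    print(next_device_name([]))                         # /dev/sda
    print(next_device_name(["/dev/sda", "/dev/sdb"]))   # /dev/sdc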
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 924.366998] env[69475]: DEBUG nova.policy [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f5b01df7c5814dd38f7cf5c65af89316', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5ab334f7e3de413eb9fb934d82951ccb', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 924.563971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.564405] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 924.651832] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 924.651832] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 924.651832] env[69475]: _type = "HttpNfcLease" [ 924.651832] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 924.750333] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Start building block device mappings for instance. 
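[editorial note] The policy check above denies "network:attach_external_network" for a credential set that carries only the reader and member roles. A very reduced sketch of that kind of role-based decision, using the credentials copied from the log, is shown below; real deployments evaluate oslo.policy rules, which this does not attempt to reproduce, and the rule table here is invented for the example.

    # Reduced illustration of a role-based policy decision. The credential
    # fields are taken from the log entry above; the rule table is an
    # example only and is not Nova's policy file.
    credentials = {
        'user_id': 'f5b01df7c5814dd38f7cf5c65af89316',
        'project_id': '5ab334f7e3de413eb9fb934d82951ccb',
        'roles': ['reader', 'member'],
        'is_admin': False,
    }

    required_roles = {
        'network:attach_external_network': {'admin'},     # example rule only
        'os_compute_api:servers:create': {'member'},      # example rule only
    }

    def check(action, creds):
        needed = required_roles.get(action, set())
        return bool(needed & set(creds['roles'])) or creds.get('is_admin', False)

    print(check('network:attach_external_network', credentials))  # False
    print(check('os_compute_api:servers:create', credentials))    # True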
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 924.952630] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078880cb-577c-4017-b622-0b81e15998eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.960287] env[69475]: DEBUG nova.network.neutron [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.987173] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 925.068326] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 925.154443] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 925.154443] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 925.154443] env[69475]: _type = "HttpNfcLease" [ 925.154443] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 925.162874] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Successfully created port: 02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.265522] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bcb2d45-4764-4989-add7-aa4fcbdde1e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.275637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d963c2-d298-404c-9066-9c7294537da3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.312776] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24c87ef-dd5b-4983-9b04-59a58b7bf9fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.322092] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2ceca8-582e-4918-8cd5-387aa1e74294 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.336512] env[69475]: DEBUG nova.compute.provider_tree [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.464065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.464065] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Instance network_info: |[{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 925.464065] env[69475]: DEBUG oslo_concurrency.lockutils [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Acquired lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.464273] env[69475]: DEBUG nova.network.neutron [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Refreshing network info cache for port afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 925.466993] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:29:af:35', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'afb4cf7c-0e25-4b9a-8f0d-90f08fecda68', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 925.479542] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 925.483099] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 925.483729] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-85fb172c-9baf-4fd5-b3d7-de5fb80e8f70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.501990] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 925.502347] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00009215-6f03-4a7b-931c-d409ef4fbf13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.510131] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 925.510131] env[69475]: value = "task-3508383" [ 925.510131] env[69475]: _type = "Task" [ 925.510131] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.511136] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 925.511136] env[69475]: value = "task-3508384" [ 925.511136] env[69475]: _type = "Task" [ 925.511136] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.526871] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508383, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.605311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 925.653200] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 925.653200] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 925.653200] env[69475]: _type = "HttpNfcLease" [ 925.653200] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 925.656535] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 925.656535] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528e6958-86b8-47c4-6ee0-d13074e363e7" [ 925.656535] env[69475]: _type = "HttpNfcLease" [ 925.656535] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 925.657620] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58601be0-ca8b-45e1-ba70-cd36c249d708 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.666396] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 925.666599] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk. 
{{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 925.742492] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-16e5ecda-5cc6-44f9-8070-cc113a6035ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.762045] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 925.842965] env[69475]: DEBUG nova.scheduler.client.report [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 925.868179] env[69475]: DEBUG nova.network.neutron [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updated VIF entry in instance network info cache for port afb4cf7c-0e25-4b9a-8f0d-90f08fecda68. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 925.868559] env[69475]: DEBUG nova.network.neutron [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.026008] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508383, 'name': CreateVM_Task, 'duration_secs': 0.40367} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.030712] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 926.031203] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508384, 'name': PowerOffVM_Task, 'duration_secs': 0.321276} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.031745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.031908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.032238] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 926.032704] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 926.032704] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 926.039467] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fbfddf9-75fd-460f-9b40-395c7c5c1361 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.045656] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 926.045656] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d24626-4aed-106a-a006-a2d2f38855d2" [ 926.045656] env[69475]: _type = "Task" [ 926.045656] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.058974] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d24626-4aed-106a-a006-a2d2f38855d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011587} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.060563] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.060924] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 926.061266] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.061452] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.061697] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.061989] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f4c6f03-70fc-4242-872e-5341a783d3c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.072524] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.072752] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 926.073712] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63e02a30-234a-4c30-90b2-87e44f2ee220 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.080606] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 926.080606] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201bdb3-7723-951d-72c3-8ce023b6d848" [ 926.080606] env[69475]: _type = "Task" [ 926.080606] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.092199] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201bdb3-7723-951d-72c3-8ce023b6d848, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.348247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.615s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.348778] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 926.354360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.529s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.375524] env[69475]: DEBUG oslo_concurrency.lockutils [req-1ac546a4-a826-413f-b645-5002d4526dd3 req-d2ea7d2d-c668-4743-931d-14543759811e service nova] Releasing lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.544472] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.546069] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 926.546069] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 926.546069] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 926.546069] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 926.546069] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 926.546821] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 926.547155] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 926.547489] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 926.547823] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 926.549590] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 926.554508] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 926.555058] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 926.555464] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a68922e5-8fbd-4380-928e-bd1ed08c0a5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.569985] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b3a347d-3eb2-44c0-9181-d917fdbb5c7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.580265] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 926.580559] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 926.582032] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-22edbc1a-d30e-4627-8cf4-43d806cb5030 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.584512] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 926.584512] env[69475]: value = "task-3508385" [ 926.584512] env[69475]: _type = "Task" [ 926.584512] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.599520] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.599910] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 926.603056] env[69475]: DEBUG nova.virt.hardware [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 926.603056] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96970f6b-8a9a-49e2-93d7-803a2519e5fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.618710] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201bdb3-7723-951d-72c3-8ce023b6d848, 'name': SearchDatastore_Task, 'duration_secs': 0.010081} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.620704] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508385, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.625589] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9d818e7-edb3-40db-95e7-0d2d477d31a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.633380] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a4d233c-3b7c-45d6-a8c4-ef5ed3db0fad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.639974] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 926.639974] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af3a31-8b1a-2c90-9b47-62fa272d9c62" [ 926.639974] env[69475]: _type = "Task" [ 926.639974] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.661925] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af3a31-8b1a-2c90-9b47-62fa272d9c62, 'name': SearchDatastore_Task, 'duration_secs': 0.011027} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.662207] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.662453] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.662715] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f0e4c134-22e6-42dd-a41a-7d6c37ab4c9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.669732] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 926.669732] env[69475]: value = "task-3508386" [ 926.669732] env[69475]: _type = "Task" [ 926.669732] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.681112] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508386, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.818400] env[69475]: DEBUG oslo_vmware.rw_handles [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527445b6-5e8c-a9bf-1fdc-e04bbec2903a/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 926.818400] env[69475]: INFO nova.virt.vmwareapi.images [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Downloaded image file data 04a88c3d-f91d-41ae-b78d-8f3d116adc4c [ 926.819269] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ecea02f-7c7a-420d-8fc8-b065cc7650f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.843442] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a3c72b0-941a-416d-b24c-2ee0da63cab5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.860240] env[69475]: DEBUG nova.compute.utils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 926.864178] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 926.864178] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 926.891565] env[69475]: INFO nova.virt.vmwareapi.images [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] The imported VM was unregistered [ 926.893358] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 926.893597] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 926.894221] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f1146f99-674c-4faa-880a-feb6f98e30a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.908627] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 926.908822] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766/OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766.vmdk to [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk. {{(pid=69475) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 926.909768] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-598b9afe-2445-410f-9700-9f24384d6ec1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.917177] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 926.917177] env[69475]: value = "task-3508388" [ 926.917177] env[69475]: _type = "Task" [ 926.917177] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.928563] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.946735] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff30d743-ab60-49b6-a25e-eae4f95782cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.956278] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d788371a-8ee4-46ee-8c5c-8e508f602f98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.994817] env[69475]: DEBUG nova.policy [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38eb8888c100488db7e0e04f1f50c104', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d8b5413bb2444538234a0c37633c89f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 926.997024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d3822b-ed9e-47e3-b619-d6faed385cbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.007870] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a05a3d9-1ea6-4b6f-a095-32ff2d8a026e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.027328] env[69475]: DEBUG nova.compute.provider_tree [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.102053] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508385, 'name': ReconfigVM_Task, 'duration_secs': 0.273632} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.102681] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 927.126927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.128865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.128865] env[69475]: INFO nova.compute.manager [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Shelving [ 927.181561] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508386, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.270992] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Successfully updated port: 02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.365681] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 927.431469] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.438688] env[69475]: DEBUG nova.compute.manager [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Received event network-vif-plugged-02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 927.438945] env[69475]: DEBUG oslo_concurrency.lockutils [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] Acquiring lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.439168] env[69475]: DEBUG oslo_concurrency.lockutils [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.439329] env[69475]: DEBUG oslo_concurrency.lockutils [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.440150] env[69475]: DEBUG nova.compute.manager [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] No waiting events found dispatching network-vif-plugged-02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 927.440150] env[69475]: WARNING nova.compute.manager [req-22256f64-52eb-42a5-b493-83bf47d8498f req-b5cbb1d2-5a4f-4acd-83d6-5781772227d2 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Received unexpected event network-vif-plugged-02e51603-f6e4-43c3-9a38-465fab447405 for instance with vm_state building and task_state spawning. 
[ 927.529033] env[69475]: DEBUG nova.scheduler.client.report [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.613498] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:39:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2d6dab00-d5b5-4904-b4d4-4a46ef0cf4b2',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2018768159',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 927.614149] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 927.614422] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 927.614678] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 927.614863] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 927.615071] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 927.615589] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 927.615849] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 927.616264] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 927.616509] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 927.616724] env[69475]: DEBUG nova.virt.hardware [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 927.624697] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfiguring VM instance instance-00000046 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 927.625542] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5cb40d57-abf5-4bd8-847d-8ce5cb4479c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.650036] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Successfully created port: 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 927.656323] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 927.656323] env[69475]: value = "task-3508389" [ 927.656323] env[69475]: _type = "Task" [ 927.656323] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.669450] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508389, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.683233] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508386, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571166} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.683233] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 927.683438] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 927.683699] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fb30d639-7501-4dd9-8e7b-aa32b2f9d715 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.694503] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 927.694503] env[69475]: value = "task-3508390" [ 927.694503] env[69475]: _type = "Task" [ 927.694503] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.704609] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508390, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.774406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.774584] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquired lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.774796] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 927.935492] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.037137] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.682s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 928.037137] env[69475]: INFO nova.compute.manager [None req-f06ee48d-3b14-426e-8e07-3a06972eac19 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Successfully reverted task state from rebuilding on failure for instance. 
[ 928.043292] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.288s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.044825] env[69475]: INFO nova.compute.claims [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 928.151921] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 928.152414] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d472fa97-e13e-43d7-9dcb-079b00a45728 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.164790] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 928.164790] env[69475]: value = "task-3508391" [ 928.164790] env[69475]: _type = "Task" [ 928.164790] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.169982] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.170091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.183119] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508389, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.191244] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508391, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.204540] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508390, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.347001} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.204850] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 928.205955] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29d1c56-ee86-43a0-8278-ad7bebaf56bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.249535] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.250182] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-620bf9e8-34b8-4057-8856-afe191d932c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.290484] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 928.290484] env[69475]: value = "task-3508392" [ 928.290484] env[69475]: _type = "Task" [ 928.290484] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.305169] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508392, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.354243] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 928.378728] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 928.422540] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 928.422941] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 928.423231] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 928.423467] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 928.423690] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 928.423920] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 928.424430] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 928.424512] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 928.424766] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 928.425073] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 928.425315] env[69475]: DEBUG nova.virt.hardware [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 928.426794] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0993502a-318c-4164-ad87-a284e85bdd12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.448400] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25bfd1d-7ed1-4e79-bd2d-2660eeec9f03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.455244] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 52%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.670174] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508389, 'name': ReconfigVM_Task, 'duration_secs': 0.608473} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.673721] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfigured VM instance instance-00000046 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 928.674190] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 928.681146] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7713d9c-7c7e-4ec0-a531-077ad7ac59b6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.689730] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.689730] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.689730] env[69475]: DEBUG nova.compute.manager [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 928.702476] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b864e0c6-2074-4657-9d8e-57fa949a95b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.715224] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.717757] env[69475]: DEBUG nova.network.neutron [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Updating instance_info_cache with network_info: [{"id": "02e51603-f6e4-43c3-9a38-465fab447405", "address": "fa:16:3e:ad:c5:52", "network": {"id": "e0cb349d-be7a-4e59-938c-a80f2ca290bd", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1049376404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ab334f7e3de413eb9fb934d82951ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", 
"external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e51603-f6", "ovs_interfaceid": "02e51603-f6e4-43c3-9a38-465fab447405", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.719288] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e8e7ea7-6588-4ae8-af43-6daf5dbaa515 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.732930] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508391, 'name': PowerOffVM_Task, 'duration_secs': 0.247048} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.734837] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.736161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c296858-cfb4-47ae-bed6-53dd7187035a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.741802] env[69475]: DEBUG nova.compute.manager [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 928.742452] env[69475]: DEBUG nova.objects.instance [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lazy-loading 'flavor' on Instance uuid f40aa0bb-af1d-4f8f-a906-f1c83307b465 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.746098] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 928.746098] env[69475]: value = "task-3508393" [ 928.746098] env[69475]: _type = "Task" [ 928.746098] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.767692] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bab1d859-f761-40e3-b7b5-354e0678b9bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.774863] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508393, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.801461] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508392, 'name': ReconfigVM_Task, 'duration_secs': 0.434198} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.801740] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.802501] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a9aab46e-9eb4-41b9-a436-3eb87544d779 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.811371] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 928.811371] env[69475]: value = "task-3508394" [ 928.811371] env[69475]: _type = "Task" [ 928.811371] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.823027] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508394, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.939983] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 74%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.198711] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.238289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Releasing lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.238289] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Instance network_info: |[{"id": "02e51603-f6e4-43c3-9a38-465fab447405", "address": "fa:16:3e:ad:c5:52", "network": {"id": "e0cb349d-be7a-4e59-938c-a80f2ca290bd", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1049376404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ab334f7e3de413eb9fb934d82951ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e51603-f6", "ovs_interfaceid": "02e51603-f6e4-43c3-9a38-465fab447405", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 929.238289] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ad:c5:52', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bafe8721-91d4-4127-b215-d9e8e27947dc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02e51603-f6e4-43c3-9a38-465fab447405', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.245568] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Creating folder: Project (5ab334f7e3de413eb9fb934d82951ccb). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.250682] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d515808-26c5-4644-a654-c7ff049e6b45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.270115] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508393, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.275825] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Created folder: Project (5ab334f7e3de413eb9fb934d82951ccb) in parent group-v700823. [ 929.276523] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Creating folder: Instances. Parent ref: group-v701037. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.277180] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29acce56-010c-4220-b016-4105b5a55b48 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.285967] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 929.286624] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-99739703-9378-4227-a2b8-413b3d0610a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.296163] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Created folder: Instances in parent group-v701037. [ 929.296734] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.300510] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.300510] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13a14825-d36e-4548-b1c0-1368e171c354 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.323865] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 929.323865] env[69475]: value = "task-3508397" [ 929.323865] env[69475]: _type = "Task" [ 929.323865] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.336972] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508397, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.344311] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508394, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.344701] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.344701] env[69475]: value = "task-3508398" [ 929.344701] env[69475]: _type = "Task" [ 929.344701] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.355616] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508398, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.440579] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.554801] env[69475]: DEBUG nova.compute.manager [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Received event network-changed-02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.554801] env[69475]: DEBUG nova.compute.manager [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Refreshing instance network info cache due to event network-changed-02e51603-f6e4-43c3-9a38-465fab447405. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 929.555016] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Acquiring lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.555132] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Acquired lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.555298] env[69475]: DEBUG nova.network.neutron [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Refreshing network info cache for port 02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 929.576797] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad24fc52-f5b4-4092-b5bd-38729acf99d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.585720] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0dc2e68-3b3a-4369-9b2e-da080066d929 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.620098] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Successfully updated port: 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 929.622097] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43126a91-6b0f-4e96-afdf-65c8e4704972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.631347] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cf0d53-7639-4c87-8429-656e54f43cb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.647167] env[69475]: DEBUG nova.compute.provider_tree [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 929.761450] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 
f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 929.764844] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b00352a-5773-49d7-b56f-e9a89e357bfd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.766503] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508393, 'name': ReconfigVM_Task, 'duration_secs': 0.875917} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.766774] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfigured VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 929.767040] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 929.775447] env[69475]: DEBUG oslo_vmware.api [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 929.775447] env[69475]: value = "task-3508399" [ 929.775447] env[69475]: _type = "Task" [ 929.775447] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.783542] env[69475]: DEBUG oslo_vmware.api [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508399, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.837515] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508394, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.842604] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508397, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.854065] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508398, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.939155] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508388, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.686422} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.939335] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766/OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766.vmdk to [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk. [ 929.939517] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Cleaning up location [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 929.939680] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_8fa5e978-4e3c-4dbe-b209-41af95560766 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 929.939996] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c110053-bd6a-433f-9cf3-dfe342effecb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.947067] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 929.947067] env[69475]: value = "task-3508400" [ 929.947067] env[69475]: _type = "Task" [ 929.947067] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.961238] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508400, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.126469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.126469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.126713] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 930.178187] env[69475]: ERROR nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [req-7fb19790-759d-4bd4-8646-5b7cd8e1599e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-7fb19790-759d-4bd4-8646-5b7cd8e1599e"}]} [ 930.195658] env[69475]: DEBUG nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 930.220377] env[69475]: DEBUG nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 930.220644] env[69475]: DEBUG nova.compute.provider_tree [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.236593] env[69475]: DEBUG nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 930.264055] env[69475]: DEBUG nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 930.273279] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ae514c-5fa4-4367-86b1-2b4a292b1abc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.286143] env[69475]: DEBUG oslo_vmware.api [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 
tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508399, 'name': PowerOffVM_Task, 'duration_secs': 0.240471} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.301215] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 930.301519] env[69475]: DEBUG nova.compute.manager [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 930.307242] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d156838e-9b2c-40db-b36f-dc44db1bda1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.310231] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1686c3f-6f09-472b-b823-cb7cb611b763 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.333889] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 930.356725] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508394, 'name': Rename_Task, 'duration_secs': 1.178077} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.357067] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508397, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.358461] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 930.358767] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d074bbb-6cd0-4686-bf26-0875c6bffa14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.368744] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508398, 'name': CreateVM_Task, 'duration_secs': 1.024483} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.369732] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.370475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.370642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.371037] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 930.371584] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ca8437f-2c7d-403d-b6ee-7e67d7892d61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.374533] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 930.374533] env[69475]: value = "task-3508401" [ 930.374533] env[69475]: _type = "Task" [ 930.374533] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.379660] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 930.379660] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52baaac9-aeb2-5e13-8323-3de504499d38" [ 930.379660] env[69475]: _type = "Task" [ 930.379660] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.389939] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508401, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.396307] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52baaac9-aeb2-5e13-8323-3de504499d38, 'name': SearchDatastore_Task, 'duration_secs': 0.01119} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.399195] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.399471] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.399726] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.399890] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 930.400141] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.400882] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2331ae60-9b08-4587-874f-d9e8ea4caa41 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.410523] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.410838] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.411554] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-107f9014-005b-4869-af92-c3ba3ebaba0d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.423978] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 930.423978] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52deae8e-8e88-d740-f219-6e2ec46003f8" [ 930.423978] env[69475]: _type = "Task" [ 930.423978] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.435586] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52deae8e-8e88-d740-f219-6e2ec46003f8, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.436539] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8d008f0-51f4-4795-bdbf-0f1f74bb4818 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.441930] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 930.441930] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c2348-90b2-44e5-489e-6c65c08a0d2d" [ 930.441930] env[69475]: _type = "Task" [ 930.441930] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.453921] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c2348-90b2-44e5-489e-6c65c08a0d2d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.458603] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063381} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.461228] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 930.461411] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.461654] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk to [datastore1] 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc/2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.462456] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87cce29d-4537-40e3-893a-11ee431f18d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.468268] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 930.468268] env[69475]: value = "task-3508402" [ 930.468268] env[69475]: _type = "Task" [ 930.468268] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.472386] env[69475]: DEBUG nova.network.neutron [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Updated VIF entry in instance network info cache for port 02e51603-f6e4-43c3-9a38-465fab447405. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 930.472737] env[69475]: DEBUG nova.network.neutron [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Updating instance_info_cache with network_info: [{"id": "02e51603-f6e4-43c3-9a38-465fab447405", "address": "fa:16:3e:ad:c5:52", "network": {"id": "e0cb349d-be7a-4e59-938c-a80f2ca290bd", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-1049376404-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5ab334f7e3de413eb9fb934d82951ccb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bafe8721-91d4-4127-b215-d9e8e27947dc", "external-id": "nsx-vlan-transportzone-680", "segmentation_id": 680, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02e51603-f6", "ovs_interfaceid": "02e51603-f6e4-43c3-9a38-465fab447405", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.480638] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.698369] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 930.769658] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365125d7-c49f-43cf-b124-6b0cb55dc146 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.780519] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d5853b-a329-4e4a-8a71-e5066527505a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.817080] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14966d68-5aca-4e31-ad49-11e112833ab4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.826180] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c539948c-f39a-4882-a7a9-2fe8d2e1a571 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.857823] env[69475]: DEBUG nova.compute.provider_tree [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 930.859772] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9051b22f-659e-481a-a4f7-705c1e9ffa87 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.175s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.866077] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508397, 'name': CreateSnapshot_Task, 'duration_secs': 1.511219} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.866368] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 930.867202] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fb7a61-0fa0-4cb9-841c-7546d171ac55 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.888932] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508401, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.953150] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526c2348-90b2-44e5-489e-6c65c08a0d2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010263} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.953546] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.953878] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 78b5496c-f8e2-4681-a36b-50897b0f7325/78b5496c-f8e2-4681-a36b-50897b0f7325.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.954183] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-643abe60-3728-4ba9-8d54-6d64a752aa88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.961200] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 930.961200] env[69475]: value = "task-3508403" [ 930.961200] env[69475]: _type = "Task" [ 930.961200] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.969522] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.974672] env[69475]: DEBUG nova.network.neutron [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.980042] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Releasing lock "refresh_cache-78b5496c-f8e2-4681-a36b-50897b0f7325" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.980042] env[69475]: DEBUG nova.compute.manager [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-vif-plugged-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.980042] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Acquiring lock "02ba199b-a7dc-421c-a14a-b562da275377-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.980270] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Lock "02ba199b-a7dc-421c-a14a-b562da275377-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.981363] 
env[69475]: DEBUG oslo_concurrency.lockutils [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] Lock "02ba199b-a7dc-421c-a14a-b562da275377-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.981363] env[69475]: DEBUG nova.compute.manager [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] No waiting events found dispatching network-vif-plugged-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 930.981363] env[69475]: WARNING nova.compute.manager [req-a3d94cd9-a602-4b9d-8ca2-34a03d54311a req-71e6491e-b131-4c90-93d4-78b79a16b1ee service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received unexpected event network-vif-plugged-04c0ec8b-7341-4495-9aa9-5edcc8fd816a for instance with vm_state building and task_state spawning. [ 930.981363] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.988643] env[69475]: DEBUG nova.network.neutron [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Port f9a10762-ba87-425f-9623-1ffdf22c5bb4 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 931.389079] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 931.393021] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c53ed5dd-f812-4af1-a971-dd9d9b7f7f5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.396635] env[69475]: DEBUG oslo_vmware.api [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508401, 'name': PowerOnVM_Task, 'duration_secs': 0.7337} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.397679] env[69475]: DEBUG nova.scheduler.client.report [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 931.397911] env[69475]: DEBUG nova.compute.provider_tree [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 112 to 113 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 931.398102] env[69475]: DEBUG nova.compute.provider_tree [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 931.404140] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 931.406204] env[69475]: INFO nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Took 10.92 seconds to spawn the instance on the hypervisor. 
[ 931.406499] env[69475]: DEBUG nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.408363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fb3437-c751-4fa4-bd62-1a3482f86eab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.416658] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 931.416658] env[69475]: value = "task-3508404" [ 931.416658] env[69475]: _type = "Task" [ 931.416658] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.435606] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508404, 'name': CloneVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.478776] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.485040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 931.485408] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Instance network_info: |[{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 931.491745] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:48:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '073f8535-6b3a-4d21-a754-4c975554dcbf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04c0ec8b-7341-4495-9aa9-5edcc8fd816a', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 931.497472] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Creating folder: Project (4d8b5413bb2444538234a0c37633c89f). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 931.501618] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.507979] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f076f624-fe90-4cc9-bddf-7930cf2f2ef2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.521865] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Created folder: Project (4d8b5413bb2444538234a0c37633c89f) in parent group-v700823. [ 931.523181] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Creating folder: Instances. Parent ref: group-v701042. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 931.523994] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de6044c5-b627-44f1-bb30-e517a26bc8dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.539180] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Created folder: Instances in parent group-v701042. [ 931.539645] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 931.539981] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 931.540419] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfb13281-d689-405c-a0f5-c935305a2ac4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.564743] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 931.564743] env[69475]: value = "task-3508407" [ 931.564743] env[69475]: _type = "Task" [ 931.564743] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.576880] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508407, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.702429] env[69475]: DEBUG nova.compute.manager [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 931.702429] env[69475]: DEBUG nova.compute.manager [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing instance network info cache due to event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 931.703775] env[69475]: DEBUG oslo_concurrency.lockutils [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.703775] env[69475]: DEBUG oslo_concurrency.lockutils [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 931.703775] env[69475]: DEBUG nova.network.neutron [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 931.845996] env[69475]: DEBUG nova.objects.instance [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lazy-loading 'flavor' on Instance uuid f40aa0bb-af1d-4f8f-a906-f1c83307b465 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.908123] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.865s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.908871] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 931.911700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.306s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.911946] env[69475]: DEBUG nova.objects.instance [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lazy-loading 'resources' on Instance uuid 0a65565c-c679-47e5-8606-832fe3876af6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.929621] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508404, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.937714] env[69475]: INFO nova.compute.manager [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Took 29.08 seconds to build instance. [ 931.978039] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.985093] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.030482] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.030670] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.030724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.079408] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508407, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.360451] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.360696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquired lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.360894] env[69475]: DEBUG nova.network.neutron [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.361159] env[69475]: DEBUG nova.objects.instance [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lazy-loading 'info_cache' on Instance uuid f40aa0bb-af1d-4f8f-a906-f1c83307b465 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.415869] env[69475]: DEBUG nova.compute.utils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 932.425734] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 932.426085] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 932.440804] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7cf4cad6-082b-487e-b759-f22d33920f88 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.595s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.446559] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508404, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.489829] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.497821] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.546941] env[69475]: DEBUG nova.policy [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b5a3e08ca1e24e328f386127d394fd96', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '13c35dc0fd1c42b083d2a3c7070ed230', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 932.578465] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508407, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.590419] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 932.591487] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5359ed76-7a45-41c8-9a9e-3d6ed287de6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.598053] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 932.598279] env[69475]: ERROR oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk due to incomplete transfer. 
[ 932.598479] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fdf35a5e-43a7-41ce-b4c4-778bf7d1f0be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.606269] env[69475]: DEBUG oslo_vmware.rw_handles [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/529a88e4-f45d-29ab-54ab-b5dd31b2b97b/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 932.606524] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Uploaded image 079770cf-a859-4f7a-ae7c-ef25478face9 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 932.609059] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 932.609340] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2645a6a5-c059-4a0f-bd7c-5036f86521fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.618439] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 932.618439] env[69475]: value = "task-3508408" [ 932.618439] env[69475]: _type = "Task" [ 932.618439] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.627767] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508408, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.777947] env[69475]: DEBUG nova.network.neutron [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updated VIF entry in instance network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 932.778400] env[69475]: DEBUG nova.network.neutron [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.865535] env[69475]: DEBUG nova.objects.base [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 932.932050] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 932.953872] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508404, 'name': CloneVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.956317] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ab56ec-4b21-49d0-aee6-c00c4993a351 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.965329] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef148d0a-cb4c-4bce-9eec-8e71c7ec38d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.017243] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.018589] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d68739-c80c-4c6e-9742-04ee202c6139 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.026356] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508402, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.465353} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.027111] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/04a88c3d-f91d-41ae-b78d-8f3d116adc4c/04a88c3d-f91d-41ae-b78d-8f3d116adc4c.vmdk to [datastore1] 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc/2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.028144] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501f0e34-da9f-46cd-895c-3aa3437f381a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.035464] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40857d67-be50-454c-9a75-d48a573587cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.062724] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc/2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 933.064163] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6e8e032-c645-4cf6-81e1-735ca62f70e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.088517] env[69475]: DEBUG nova.compute.provider_tree [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 933.102085] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508407, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.103710] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 933.103710] env[69475]: value = "task-3508409" [ 933.103710] env[69475]: _type = "Task" [ 933.103710] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.114663] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508409, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.127908] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508408, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.166302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.166506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.166690] env[69475]: DEBUG nova.network.neutron [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 933.281845] env[69475]: DEBUG oslo_concurrency.lockutils [req-8337b25c-2fb5-44dc-957c-30e5f95abba0 req-88a169f7-5b53-4588-9676-f8ce90bc8802 service nova] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.325893] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Successfully created port: 409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 933.446025] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508404, 'name': CloneVM_Task, 'duration_secs': 1.615638} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.446025] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Created linked-clone VM from snapshot [ 933.446025] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-018241f8-bac7-4dbf-ba69-07f24c00210c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.457263] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Uploading image fc07f6a7-0a66-45fb-9298-51f7f2cb2e41 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 933.478690] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508403, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.215555} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.479208] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 78b5496c-f8e2-4681-a36b-50897b0f7325/78b5496c-f8e2-4681-a36b-50897b0f7325.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 933.479615] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 933.481914] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-254fb1ce-023d-4d71-bc3d-352b4c677063 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.488023] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 933.488023] env[69475]: value = "task-3508410" [ 933.488023] env[69475]: _type = "Task" [ 933.488023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.498861] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508410, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.501402] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 933.501402] env[69475]: value = "vm-701041" [ 933.501402] env[69475]: _type = "VirtualMachine" [ 933.501402] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 933.501974] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-c33879ce-e7d4-4dde-b3d6-2431ed7d8906 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.509087] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lease: (returnval){ [ 933.509087] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ed6ec-0abc-6374-33a3-5d2757c0c37c" [ 933.509087] env[69475]: _type = "HttpNfcLease" [ 933.509087] env[69475]: } obtained for exporting VM: (result){ [ 933.509087] env[69475]: value = "vm-701041" [ 933.509087] env[69475]: _type = "VirtualMachine" [ 933.509087] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 933.511099] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the lease: (returnval){ [ 933.511099] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ed6ec-0abc-6374-33a3-5d2757c0c37c" [ 933.511099] env[69475]: _type = "HttpNfcLease" [ 933.511099] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 933.517601] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 933.517601] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ed6ec-0abc-6374-33a3-5d2757c0c37c" [ 933.517601] env[69475]: _type = "HttpNfcLease" [ 933.517601] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 933.601024] env[69475]: DEBUG nova.scheduler.client.report [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 933.601024] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508407, 'name': CreateVM_Task, 'duration_secs': 1.678648} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.601024] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.602326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.602691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.603158] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.603772] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ad14199-b774-4ce4-ab53-81889bfc4d6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.614132] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508409, 'name': ReconfigVM_Task, 'duration_secs': 0.367781} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.615545] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Reconfigured VM instance instance-0000004d to attach disk [datastore1] 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc/2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.616453] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 933.616453] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520a8d1d-5f75-7743-d36e-fd9cd1e1371d" [ 933.616453] env[69475]: _type = "Task" [ 933.616453] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.617515] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3552c02-f2e3-4dd9-bd56-c88e5ab4faf2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.628555] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 933.628555] env[69475]: value = "task-3508412" [ 933.628555] env[69475]: _type = "Task" [ 933.628555] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.636691] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520a8d1d-5f75-7743-d36e-fd9cd1e1371d, 'name': SearchDatastore_Task, 'duration_secs': 0.009617} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.637360] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508408, 'name': Destroy_Task, 'duration_secs': 0.705988} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.643284] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.643698] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 933.646081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.646081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.646081] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 
tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 933.646081] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Destroyed the VM [ 933.646081] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 933.646081] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb405bb8-8e42-4434-b6c4-5bfe3bfce60b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.647729] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-64186ddb-fc61-45ee-a10e-fba35988c626 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.656792] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508412, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.659221] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 933.662487] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 933.662487] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 933.662487] env[69475]: value = "task-3508413" [ 933.662487] env[69475]: _type = "Task" [ 933.662487] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.662487] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3dbdf423-6f16-4031-bb59-9886e72800dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.675024] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 933.675024] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e61032-37cd-c2ac-0dda-2877395476e6" [ 933.675024] env[69475]: _type = "Task" [ 933.675024] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.681245] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508413, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.688282] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e61032-37cd-c2ac-0dda-2877395476e6, 'name': SearchDatastore_Task, 'duration_secs': 0.009742} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.689496] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5028a7d2-3896-4f86-8746-34675b280416 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.695675] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 933.695675] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527378c6-7278-4c51-4509-920b389fe542" [ 933.695675] env[69475]: _type = "Task" [ 933.695675] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.705326] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527378c6-7278-4c51-4509-920b389fe542, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.914050] env[69475]: DEBUG nova.network.neutron [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.916819] env[69475]: DEBUG nova.network.neutron [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updating instance_info_cache with network_info: [{"id": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "address": "fa:16:3e:94:d7:8c", "network": {"id": "6fbdc01c-f41b-4684-9238-39afb8859a2b", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1974902263-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "72b480b7835d47a18d77bfe4a983f017", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "72781990-3cb3-42eb-9eb1-4040dedbf66f", "external-id": "cl2-zone-812", "segmentation_id": 812, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap277b3f9d-a1", "ovs_interfaceid": "277b3f9d-a1c5-4f1b-be8a-4818987fd78e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.954684] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 933.989045] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 933.989045] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 933.989045] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 933.989388] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 933.989684] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 933.989976] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 933.990391] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 933.990721] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 933.991072] env[69475]: DEBUG 
nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 933.991386] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 933.993079] env[69475]: DEBUG nova.virt.hardware [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 933.993079] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f5c6e4-9e60-41b1-b6dd-55d84ab13a8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.009438] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027d29d6-f366-4587-88ae-b6dd811b9a48 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.017373] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508410, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062145} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.017373] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 934.021334] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fbeacf9-81d7-465f-8af6-d0f2395b16d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.036617] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 934.036617] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ed6ec-0abc-6374-33a3-5d2757c0c37c" [ 934.036617] env[69475]: _type = "HttpNfcLease" [ 934.036617] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 934.047376] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 934.047376] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523ed6ec-0abc-6374-33a3-5d2757c0c37c" [ 934.047376] env[69475]: _type = "HttpNfcLease" [ 934.047376] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 934.057346] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 78b5496c-f8e2-4681-a36b-50897b0f7325/78b5496c-f8e2-4681-a36b-50897b0f7325.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 934.058568] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dd3455-e353-4f70-a801-06d1166657b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.061256] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09bad589-01a3-4411-a7f9-47597004938c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.082400] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 934.082627] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 934.085177] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 934.085177] env[69475]: value = "task-3508414" [ 934.085177] env[69475]: _type = "Task" [ 934.085177] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.150183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.238s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.154396] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.812s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.156075] env[69475]: INFO nova.compute.claims [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 934.165660] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508414, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.171888] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508412, 'name': Rename_Task, 'duration_secs': 0.157433} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.172531] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.172877] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa1b19e0-785b-425c-a306-6fa2c0b7c8cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.177392] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508413, 'name': RemoveSnapshot_Task, 'duration_secs': 0.465995} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.177951] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 934.178792] env[69475]: DEBUG nova.compute.manager [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 934.179308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc47fac-961c-4fc5-8041-83d7104546f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.182155] env[69475]: INFO nova.scheduler.client.report [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Deleted allocations for instance 0a65565c-c679-47e5-8606-832fe3876af6 [ 934.184249] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 934.184249] env[69475]: value = "task-3508415" [ 934.184249] env[69475]: _type = "Task" [ 934.184249] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.206986] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508415, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.207860] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fd60135a-f24f-4ee7-89d8-713835b9866f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.214239] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527378c6-7278-4c51-4509-920b389fe542, 'name': SearchDatastore_Task, 'duration_secs': 0.011327} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.215529] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.215834] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/02ba199b-a7dc-421c-a14a-b562da275377.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.216120] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b54e161b-497c-48de-ba16-c38c0fe77ef6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.224525] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 934.224525] env[69475]: value = "task-3508416" [ 934.224525] env[69475]: _type = "Task" [ 934.224525] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.236565] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508416, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.417671] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.425336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Releasing lock "refresh_cache-f40aa0bb-af1d-4f8f-a906-f1c83307b465" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.602144] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508414, 'name': ReconfigVM_Task, 'duration_secs': 0.467853} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.602144] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 78b5496c-f8e2-4681-a36b-50897b0f7325/78b5496c-f8e2-4681-a36b-50897b0f7325.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 934.602678] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-18ac0c9f-fd35-44e3-b079-6466278dbfff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.612295] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 934.612295] env[69475]: value = "task-3508417" [ 934.612295] env[69475]: _type = "Task" [ 934.612295] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.622285] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508417, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.696302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b72cf294-44a4-4a8e-8444-61c0f80873e6 tempest-ServerTagsTestJSON-1683260267 tempest-ServerTagsTestJSON-1683260267-project-member] Lock "0a65565c-c679-47e5-8606-832fe3876af6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.773s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.704104] env[69475]: INFO nova.compute.manager [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Shelve offloading [ 934.712336] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508415, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.738813] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508416, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.960938] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133d7529-3942-4576-a74c-9743e52ccbff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.983670] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-403048ac-a48f-4617-b993-ec8cadbcb66f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.993884] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 935.013781] env[69475]: DEBUG nova.compute.manager [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 935.124384] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508417, 'name': Rename_Task, 'duration_secs': 0.205565} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.124724] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.125199] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-649042ca-0146-4e3a-b3a4-6798151e7d6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.132169] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 935.132169] env[69475]: value = "task-3508418" [ 935.132169] env[69475]: _type = "Task" [ 935.132169] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.144023] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508418, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.203757] env[69475]: DEBUG oslo_vmware.api [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508415, 'name': PowerOnVM_Task, 'duration_secs': 0.795238} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.203757] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.203757] env[69475]: INFO nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Took 17.44 seconds to spawn the instance on the hypervisor. [ 935.203757] env[69475]: DEBUG nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.203757] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1e9101-85a7-4198-8d25-efd25cac18d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.215900] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.216490] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dc9752fa-6055-43e8-9522-f51e0fce282a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.224973] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 935.224973] env[69475]: value = "task-3508419" [ 935.224973] env[69475]: _type = "Task" [ 935.224973] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.248583] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508416, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594702} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.251819] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/02ba199b-a7dc-421c-a14a-b562da275377.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.252063] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.252377] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 935.252623] env[69475]: DEBUG nova.compute.manager [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 935.253806] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a453738a-f310-4211-8419-5ce1971aa370 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.256391] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14036297-d1d0-4695-89f4-6c98803fd7db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.267764] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 935.268066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 935.268361] env[69475]: DEBUG nova.network.neutron [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 935.274929] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 
tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 935.274929] env[69475]: value = "task-3508420" [ 935.274929] env[69475]: _type = "Task" [ 935.274929] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.288691] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508420, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.438072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.438289] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ef96ed5c-bd88-470f-9afc-10c8c31f8ffb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.449359] env[69475]: DEBUG oslo_vmware.api [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 935.449359] env[69475]: value = "task-3508421" [ 935.449359] env[69475]: _type = "Task" [ 935.449359] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.464279] env[69475]: DEBUG oslo_vmware.api [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508421, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.500286] env[69475]: DEBUG nova.compute.manager [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Received event network-vif-plugged-409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.502035] env[69475]: DEBUG oslo_concurrency.lockutils [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] Acquiring lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.502035] env[69475]: DEBUG oslo_concurrency.lockutils [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.502035] env[69475]: DEBUG oslo_concurrency.lockutils [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.502035] env[69475]: DEBUG nova.compute.manager [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] No waiting events found dispatching network-vif-plugged-409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 935.502035] env[69475]: WARNING nova.compute.manager [req-882bec17-d4ad-4aff-981f-3deab35db0e4 req-70a6af2e-fd53-4341-a6f4-1abb168fbeb6 service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Received unexpected event network-vif-plugged-409b371c-7a12-4772-a463-d5e8bc596b60 for instance with vm_state building and task_state spawning. [ 935.506951] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 935.507208] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67172e9c-2e2d-47c2-b9e9-5107b8cc426f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.521510] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 935.521510] env[69475]: value = "task-3508422" [ 935.521510] env[69475]: _type = "Task" [ 935.521510] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.537512] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.538715] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 935.644933] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508418, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.722369] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6a1247-f2a6-4dfe-bd48-19ed14c1681f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.727868] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Successfully updated port: 409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 935.729884] env[69475]: INFO nova.compute.manager [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Took 35.72 seconds to build instance. [ 935.736466] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9870c6ed-e31e-478b-8bb7-42b457f0ac50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.774478] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1257c2a3-db2d-4012-9d3f-7925e6c4b62b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.792472] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48890385-9a63-40c8-889c-1bf374af0ce9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.800017] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508420, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154308} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.800017] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 935.801270] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611edfc5-5189-45cc-8ddf-7a40bcebe1fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.813224] env[69475]: DEBUG nova.compute.provider_tree [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.838258] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/02ba199b-a7dc-421c-a14a-b562da275377.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 935.839638] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-558d0e4f-bdcf-41e5-9292-3b1be725fa52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.867749] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 935.867749] env[69475]: value = "task-3508423" [ 935.867749] env[69475]: _type = "Task" [ 935.867749] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.879024] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.968860] env[69475]: DEBUG oslo_vmware.api [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508421, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.043028] env[69475]: DEBUG oslo_vmware.api [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508422, 'name': PowerOnVM_Task, 'duration_secs': 0.474566} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.044492] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.044492] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c857b973-c8c0-44fa-a47f-85ec364e1f7a tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance 'b1b04eb9-ded6-4425-8a06-0c26c086a09b' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 936.144550] env[69475]: DEBUG oslo_vmware.api [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508418, 'name': PowerOnVM_Task, 'duration_secs': 0.768308} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.144907] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.145167] env[69475]: INFO nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Took 10.38 seconds to spawn the instance on the hypervisor. 
[ 936.145386] env[69475]: DEBUG nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.146245] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8e05c9-97a2-45ec-9572-d71de0575706 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.232829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ff0e1df-fe95-499d-8d61-74b3b857aa36 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.239s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.233442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.233577] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquired lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.233728] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.317437] env[69475]: DEBUG nova.scheduler.client.report [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 936.378664] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508423, 'name': ReconfigVM_Task, 'duration_secs': 0.444667} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.378664] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/02ba199b-a7dc-421c-a14a-b562da275377.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.379237] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bcc41f26-1044-4c33-a6a2-d2588cd77e02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.387685] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 936.387685] env[69475]: value = "task-3508424" [ 936.387685] env[69475]: _type = "Task" [ 936.387685] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.397988] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508424, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.399983] env[69475]: DEBUG nova.network.neutron [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.464433] env[69475]: DEBUG oslo_vmware.api [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 
tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508421, 'name': PowerOnVM_Task, 'duration_secs': 0.620356} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.464773] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 936.465066] env[69475]: DEBUG nova.compute.manager [None req-0418d910-65f5-4ac3-9586-9869c915c631 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 936.465984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b89f7f2-66dc-494b-82aa-619867142f73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.670359] env[69475]: INFO nova.compute.manager [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Took 31.26 seconds to build instance. [ 936.768627] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 936.826594] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.827144] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 936.829942] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.013s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.834018] env[69475]: DEBUG nova.objects.instance [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lazy-loading 'resources' on Instance uuid a21ec73a-2658-4fc6-9bc1-0e492385d59e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 936.900609] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508424, 'name': Rename_Task, 'duration_secs': 0.273308} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.900895] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.901589] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.903750] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fd2513ac-809d-43c9-ac29-72a02086657a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.907991] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.908236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.908436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.908611] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.908774] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.911046] env[69475]: INFO nova.compute.manager [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Terminating instance [ 936.917967] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 936.917967] env[69475]: value = "task-3508425" [ 936.917967] env[69475]: _type = "Task" [ 936.917967] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.929459] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508425, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.983857] env[69475]: DEBUG nova.network.neutron [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Updating instance_info_cache with network_info: [{"id": "409b371c-7a12-4772-a463-d5e8bc596b60", "address": "fa:16:3e:2b:c1:7e", "network": {"id": "ed1543e1-b33e-4edc-b541-6a2a167af5e3", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1423926111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c35dc0fd1c42b083d2a3c7070ed230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409b371c-7a", "ovs_interfaceid": "409b371c-7a12-4772-a463-d5e8bc596b60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.173048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b62f3bdc-68ce-4cc1-84e5-0b4714777ad1 tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.774s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.334191] env[69475]: DEBUG nova.compute.utils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 937.336702] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 937.336881] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 937.417575] env[69475]: DEBUG nova.compute.manager [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 937.417710] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.418937] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-710d54bd-71b7-4298-a4c4-e40f4670ef11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.441955] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.442288] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.445444] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a1c4784-493b-4b2b-b8f9-0e3e18b2bfd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.452919] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 937.452919] env[69475]: value = "task-3508426" [ 937.452919] env[69475]: _type = "Task" [ 937.452919] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.464681] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.490672] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Releasing lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 937.490672] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Instance network_info: |[{"id": "409b371c-7a12-4772-a463-d5e8bc596b60", "address": "fa:16:3e:2b:c1:7e", "network": {"id": "ed1543e1-b33e-4edc-b541-6a2a167af5e3", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1423926111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c35dc0fd1c42b083d2a3c7070ed230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409b371c-7a", "ovs_interfaceid": "409b371c-7a12-4772-a463-d5e8bc596b60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 937.490974] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:c1:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '409b371c-7a12-4772-a463-d5e8bc596b60', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 937.499282] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Creating folder: Project (13c35dc0fd1c42b083d2a3c7070ed230). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.503024] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02eb5f8f-070c-447d-b864-546974b7bb6d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.521132] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Created folder: Project (13c35dc0fd1c42b083d2a3c7070ed230) in parent group-v700823. [ 937.521363] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Creating folder: Instances. Parent ref: group-v701045. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 937.521669] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c99ea56a-7c03-4038-8684-6c631663198d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.536291] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Created folder: Instances in parent group-v701045. [ 937.536599] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.536835] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 937.537123] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4f73c913-0e2d-46bc-acbf-8dd38ea6303b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.572876] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 937.572876] env[69475]: value = "task-3508429" [ 937.572876] env[69475]: _type = "Task" [ 937.572876] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.574777] env[69475]: DEBUG nova.compute.manager [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 937.578921] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fb39bf6-9d68-463a-b183-4c77d0ca6780 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.596213] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508429, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.644192] env[69475]: DEBUG nova.policy [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '18b7f61d026c4157827358583b2de42f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85953f12c4d7442993b8212939a14c35', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 937.691106] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.692111] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d3dda5-32b8-4e10-860a-1938dc19ca15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.706244] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.706686] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-603bc6ed-72a5-4404-bc64-ad7aca14b403 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.785977] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.786149] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.786359] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.786639] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcf0eb5a-9042-48c3-a9d6-3322270b22bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.798086] env[69475]: DEBUG oslo_vmware.api [None 
req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 937.798086] env[69475]: value = "task-3508431" [ 937.798086] env[69475]: _type = "Task" [ 937.798086] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.814776] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.844761] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 937.937719] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8a718ec-9e73-421e-bf4d-1b66435a105e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.944932] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508425, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.951224] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9bed8e-b455-44ec-a08b-d7d264f024fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.965110] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508426, 'name': PowerOffVM_Task, 'duration_secs': 0.415112} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.993478] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.993800] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.994861] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aadf312f-f1f9-4ca7-a93e-25d8727ffb44 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.997492] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e306d6b5-a403-4bb8-9ca7-04bf271bb271 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.007583] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c285ca-7f59-41c5-940b-b0c47245b629 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.025125] env[69475]: DEBUG nova.compute.provider_tree [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 938.072999] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.074389] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.074389] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.074389] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-92edfb3f-195b-45a9-af4e-fef2498de22a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.090372] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508429, 'name': CreateVM_Task, 'duration_secs': 0.468285} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.093488] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 938.093543] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 938.093543] env[69475]: value = "task-3508433" [ 938.093543] env[69475]: _type = "Task" [ 938.093543] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.094425] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.094640] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.095061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 938.095483] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-575d9464-3301-460f-adcd-0aed622e62ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.104214] env[69475]: INFO nova.compute.manager [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] instance snapshotting [ 938.105209] env[69475]: DEBUG nova.objects.instance [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.111493] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 938.111493] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261edb0-8cc4-8c36-2a9a-149fa82994d7" [ 938.111493] env[69475]: _type = "Task" [ 938.111493] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.116694] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508433, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.131672] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261edb0-8cc4-8c36-2a9a-149fa82994d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.194711] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Successfully created port: d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 938.200320] env[69475]: DEBUG nova.compute.manager [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Received event network-changed-409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.200520] env[69475]: DEBUG nova.compute.manager [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Refreshing instance network info cache due to event network-changed-409b371c-7a12-4772-a463-d5e8bc596b60. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 938.200739] env[69475]: DEBUG oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Acquiring lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.200897] env[69475]: DEBUG oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Acquired lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.201103] env[69475]: DEBUG nova.network.neutron [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Refreshing network info cache for port 409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 938.309473] env[69475]: DEBUG oslo_vmware.api [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.236545} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.309725] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.309914] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.310127] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.340621] env[69475]: INFO nova.scheduler.client.report [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted allocations for instance e8c2d21e-2e42-48de-928e-c5fd944899b6 [ 938.439710] env[69475]: DEBUG oslo_vmware.api [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508425, 'name': PowerOnVM_Task, 'duration_secs': 1.279737} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.439894] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 938.440113] env[69475]: INFO nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Took 10.06 seconds to spawn the instance on the hypervisor. 
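The PowerOnVM_Task and DeleteDatastoreFile_Task entries above follow the usual oslo.vmware pattern: a vSphere task method is invoked through the API session, then wait_for_task polls it (the "progress is 0%" lines) until it reports success. The following is a minimal sketch of that pattern, assuming an existing oslo_vmware.api.VMwareAPISession and a resolved vm_ref (both assumptions here); it is illustrative, not Nova's actual vm_util code.

```python
# Illustrative sketch of the invoke-then-poll pattern visible in the log.
# `session` is assumed to be an oslo_vmware.api.VMwareAPISession and
# `vm_ref` a VirtualMachine managed-object reference.
from oslo_vmware import exceptions as vexc


def power_on_vm(session, vm_ref):
    # Starts the vSphere task ("Invoking VirtualMachine.PowerOnVM_Task" above).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    try:
        # Polls until the task succeeds (the "progress is ..."/
        # "completed successfully" lines above).
        return session.wait_for_task(task)
    except vexc.VMwareDriverException:
        # A task failure surfaces here; the caller decides how to translate it.
        raise
```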
[ 938.440294] env[69475]: DEBUG nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 938.441188] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e513a8-c67b-465b-87d0-215fca9902a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.528338] env[69475]: DEBUG nova.scheduler.client.report [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.606024] env[69475]: DEBUG oslo_vmware.api [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508433, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216987} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.606356] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 938.606506] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 938.606736] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 938.607668] env[69475]: INFO nova.compute.manager [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Took 1.19 seconds to destroy the instance on the hypervisor. [ 938.607668] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 938.607668] env[69475]: DEBUG nova.compute.manager [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 938.607668] env[69475]: DEBUG nova.network.neutron [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 938.612843] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6b4689-c867-4a61-b262-50cc2537862b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.636923] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7165c3e3-4099-457c-b57c-ed7d43b7bc3d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.643403] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261edb0-8cc4-8c36-2a9a-149fa82994d7, 'name': SearchDatastore_Task, 'duration_secs': 0.017053} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.644089] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.644338] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 938.644600] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 938.644743] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 938.644928] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 938.645207] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-de2892c7-a73f-49d9-82f5-491944dbab69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.662645] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 938.662772] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 938.663607] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fb82d8c-7261-455d-aaa1-029d2f1e79e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.671448] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 938.671448] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522f6d5f-85ec-9189-eea3-2c0210ddb942" [ 938.671448] env[69475]: _type = "Task" [ 938.671448] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.683826] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522f6d5f-85ec-9189-eea3-2c0210ddb942, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.716484] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.716861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.716933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.717125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.717293] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.719442] env[69475]: INFO nova.compute.manager [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Terminating instance [ 938.846370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.855012] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 938.889946] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.890237] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 938.890396] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 938.892725] env[69475]: DEBUG 
nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 938.892725] env[69475]: DEBUG nova.virt.hardware [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 938.892725] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2779ed0-8b0b-4e49-9c27-c106afa024bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.901882] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0d9b68-a460-48cf-8f40-5174ab205049 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.944677] env[69475]: DEBUG nova.network.neutron [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Updated VIF entry in instance network info cache for port 409b371c-7a12-4772-a463-d5e8bc596b60. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 938.945146] env[69475]: DEBUG nova.network.neutron [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Updating instance_info_cache with network_info: [{"id": "409b371c-7a12-4772-a463-d5e8bc596b60", "address": "fa:16:3e:2b:c1:7e", "network": {"id": "ed1543e1-b33e-4edc-b541-6a2a167af5e3", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1423926111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c35dc0fd1c42b083d2a3c7070ed230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409b371c-7a", "ovs_interfaceid": "409b371c-7a12-4772-a463-d5e8bc596b60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.962625] env[69475]: INFO nova.compute.manager [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Took 31.11 seconds to build instance. 
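The instance_info_cache update above stores network_info as a list of VIF dicts (id, address, network with subnets and their ips, and so on). Below is a small self-contained sketch that extracts the fixed IPv4 addresses from a structure shaped like the logged entry; the sample data is trimmed from that entry and the helper is named purely for illustration.

```python
# Extract fixed IPs from a network_info cache entry shaped like the one
# logged above (trimmed to the relevant keys; helper name is illustrative).
network_info = [{
    "id": "409b371c-7a12-4772-a463-d5e8bc596b60",
    "address": "fa:16:3e:2b:c1:7e",
    "network": {
        "label": "tempest-InstanceActionsTestJSON-1423926111-network",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4}],
        }],
    },
}]


def fixed_ips(nw_info):
    """Yield (vif_id, ip_address) for every fixed IP in the cache entry."""
    for vif in nw_info:
        for subnet in vif.get("network", {}).get("subnets", []):
            for ip in subnet.get("ips", []):
                if ip.get("type") == "fixed":
                    yield vif["id"], ip["address"]


print(list(fixed_ips(network_info)))
# [('409b371c-7a12-4772-a463-d5e8bc596b60', '192.168.128.12')]
```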
[ 939.034305] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.204s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.037281] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.814s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.038429] env[69475]: DEBUG nova.objects.instance [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lazy-loading 'resources' on Instance uuid 8cc0636c-84af-4f68-bec8-1493b421a605 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.071695] env[69475]: INFO nova.scheduler.client.report [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted allocations for instance a21ec73a-2658-4fc6-9bc1-0e492385d59e [ 939.154026] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 939.154931] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-19a37bf8-ae50-4117-a5ab-bac59df37fef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.165808] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 939.165808] env[69475]: value = "task-3508434" [ 939.165808] env[69475]: _type = "Task" [ 939.165808] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.180606] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508434, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.188238] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522f6d5f-85ec-9189-eea3-2c0210ddb942, 'name': SearchDatastore_Task, 'duration_secs': 0.022926} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.189617] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ed20df3-8f7a-4977-a582-229c8f009771 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.197915] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 939.197915] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5283b0d7-18f3-812b-ed63-c574a7f14a75" [ 939.197915] env[69475]: _type = "Task" [ 939.197915] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.207252] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5283b0d7-18f3-812b-ed63-c574a7f14a75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.224033] env[69475]: DEBUG nova.compute.manager [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 939.224340] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 939.225578] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c6c707-1f9e-42aa-8f89-1d4ef7d8045f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.234823] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.235168] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69ff7dc8-41f7-413d-8609-1daf672a85eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.243168] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 939.243168] env[69475]: value = "task-3508435" [ 939.243168] env[69475]: _type = "Task" [ 939.243168] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.252821] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508435, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.414805] env[69475]: DEBUG nova.network.neutron [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Port f9a10762-ba87-425f-9623-1ffdf22c5bb4 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 939.415202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.415343] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.415517] env[69475]: DEBUG nova.network.neutron [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.448404] env[69475]: DEBUG oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Releasing lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.448692] env[69475]: DEBUG nova.compute.manager [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-vif-unplugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 939.448887] env[69475]: DEBUG oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 939.449131] env[69475]: DEBUG oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 939.449326] env[69475]: DEBUG 
oslo_concurrency.lockutils [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.449506] env[69475]: DEBUG nova.compute.manager [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] No waiting events found dispatching network-vif-unplugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 939.449677] env[69475]: WARNING nova.compute.manager [req-dee62fca-a763-41c5-927c-cdc664e2fc17 req-eb43be68-0e78-49d9-a133-318cc872bfde service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received unexpected event network-vif-unplugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 for instance with vm_state shelved and task_state shelving_offloading. [ 939.468059] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7a37851a-9e9b-4ad5-b33a-c9ef0f4e06f0 tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.628s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.522284] env[69475]: DEBUG nova.network.neutron [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 939.581254] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35eb8f8-2b76-4f5f-9fb9-4942839eeff9 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "a21ec73a-2658-4fc6-9bc1-0e492385d59e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.527s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 939.678616] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508434, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.709610] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5283b0d7-18f3-812b-ed63-c574a7f14a75, 'name': SearchDatastore_Task, 'duration_secs': 0.022139} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.713713] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 939.714095] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3/d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 939.717569] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8cb12fa5-419b-4581-9927-7852e0174889 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.726960] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 939.726960] env[69475]: value = "task-3508436" [ 939.726960] env[69475]: _type = "Task" [ 939.726960] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.741143] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.755045] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508435, 'name': PowerOffVM_Task, 'duration_secs': 0.417137} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.758600] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 939.758721] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 939.759376] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d33b560-18a6-48cf-976a-d552cdcbc989 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.836228] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 939.836492] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 939.836726] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleting the datastore file [datastore2] a87da6e4-d7ec-4624-94bc-b76ade04d511 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 939.837042] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6fc4f3df-31ab-4162-bf72-240e32125644 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.845590] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for the task: (returnval){ [ 939.845590] env[69475]: value = "task-3508438" [ 939.845590] env[69475]: _type = "Task" [ 939.845590] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.859525] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508438, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.965120] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5febc35-a71a-4f64-b1c7-a58d1b7d17ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.974172] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffce93d-f40e-4906-902b-f9f92e9c4197 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.019804] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31f583bd-104c-4a47-8bf6-167dfbaae929 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.028855] env[69475]: INFO nova.compute.manager [-] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Took 1.42 seconds to deallocate network for instance. [ 940.032036] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96e603d-9550-49eb-a095-f7953ac32a95 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.054286] env[69475]: DEBUG nova.compute.provider_tree [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.178998] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508434, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.218502] env[69475]: INFO nova.compute.manager [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Rescuing [ 940.218821] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.219062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.219352] env[69475]: DEBUG nova.network.neutron [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 940.247386] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508436, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.365276] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.365276] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.375600] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508438, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.484553] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Successfully updated port: d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 940.514810] env[69475]: DEBUG nova.network.neutron [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.543065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.557530] env[69475]: DEBUG nova.scheduler.client.report [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.677974] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508434, 'name': CreateSnapshot_Task, 'duration_secs': 1.019051} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.678276] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 940.679119] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a545d3-95c4-4be4-8696-1fbb3aaac067 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.739156] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.70208} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.739478] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3/d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 940.739620] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 940.739810] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c3e83a1-38b8-426a-b6bb-20656bb88ef5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.746026] env[69475]: DEBUG nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.746249] env[69475]: DEBUG nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing instance network info cache due to event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 940.746578] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.746578] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.746829] env[69475]: DEBUG nova.network.neutron [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 940.749344] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 940.749344] env[69475]: value = "task-3508439" [ 940.749344] env[69475]: _type = "Task" [ 940.749344] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.760926] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508439, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.862193] env[69475]: DEBUG oslo_vmware.api [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Task: {'id': task-3508438, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.727035} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.863383] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 940.863383] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 940.863383] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 940.863383] env[69475]: INFO nova.compute.manager [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Took 1.64 seconds to destroy the instance on the hypervisor. [ 940.863383] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 940.863645] env[69475]: DEBUG nova.compute.manager [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 940.863645] env[69475]: DEBUG nova.network.neutron [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 940.867781] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 940.987840] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 940.988016] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquired lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 940.988173] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 941.018595] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.064222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.027s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.066529] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.642s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.068486] env[69475]: INFO nova.compute.claims [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.094138] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "235653ac-a893-4f42-a394-dd81f61f0d73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.094397] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.094604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.094781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.094941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.097465] env[69475]: INFO nova.compute.manager [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Terminating instance [ 941.169322] env[69475]: DEBUG nova.network.neutron [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.198343] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 941.198918] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-2a40743e-089d-41f7-8185-2d40aaca4f27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.210168] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 941.210168] env[69475]: value = "task-3508440" [ 941.210168] env[69475]: _type = "Task" [ 941.210168] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.218411] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508440, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.262680] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508439, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076322} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.262976] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 941.264106] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f2e747-70b3-4e98-b8a6-a13b1c729043 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.290392] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3/d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 941.290784] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b0e5150-0077-4238-bb02-40e3a75295e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.314405] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 941.314405] env[69475]: value = "task-3508441" [ 941.314405] env[69475]: _type = "Task" [ 941.314405] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.327885] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508441, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.393462] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.525041] env[69475]: DEBUG nova.compute.manager [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69475) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 941.529107] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.544128] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 941.591475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e2e2433-9cc7-41f3-b836-24cbb040b355 tempest-ServerActionsV293TestJSON-1453348147 tempest-ServerActionsV293TestJSON-1453348147-project-member] Lock "8cc0636c-84af-4f68-bec8-1493b421a605" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.259s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.603088] env[69475]: DEBUG nova.compute.manager [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 941.603088] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.603564] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca52da3e-eedb-41bd-82f8-56aabfcd1b9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.614649] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.615131] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5bc7959-3d15-44a7-8ff8-63197d14a1c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.625350] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 941.625350] env[69475]: value = "task-3508442" [ 941.625350] env[69475]: _type = "Task" [ 941.625350] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.639791] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.672442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.725977] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508440, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.811421] env[69475]: DEBUG nova.network.neutron [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updating instance_info_cache with network_info: [{"id": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "address": "fa:16:3e:ea:d2:ac", "network": {"id": "cdd4d2b4-ed84-46e1-8f22-c35e9bb2a79e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1352479680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85953f12c4d7442993b8212939a14c35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e68d1a61-2c49-4777-87c4-5eb73c467ad3", "external-id": "nsx-vlan-transportzone-52", "segmentation_id": 52, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd77fc39a-89", "ovs_interfaceid": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.831617] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508441, 'name': ReconfigVM_Task, 'duration_secs': 0.512333} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.831617] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Reconfigured VM instance instance-00000051 to attach disk [datastore2] d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3/d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 941.831617] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-163029de-cd2f-4209-9c43-526513769ce8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.842179] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 941.842179] env[69475]: value = "task-3508443" [ 941.842179] env[69475]: _type = "Task" [ 941.842179] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.857105] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508443, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.965435] env[69475]: DEBUG nova.network.neutron [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updated VIF entry in instance network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 941.965830] env[69475]: DEBUG nova.network.neutron [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.136452] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508442, 'name': PowerOffVM_Task, 'duration_secs': 0.316125} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.139245] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.139425] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.139946] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85153b36-d800-4145-8ed0-aa509e8f7780 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.226035] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508440, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.226035] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.226255] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.226290] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleting the datastore file [datastore2] 235653ac-a893-4f42-a394-dd81f61f0d73 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.226518] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2ee65814-675f-42f8-a2e9-2d9f5dcde5a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.235546] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for the task: (returnval){ [ 942.235546] env[69475]: value = "task-3508445" [ 942.235546] env[69475]: _type = "Task" [ 942.235546] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.248208] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508445, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.318461] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Releasing lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.319393] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Instance network_info: |[{"id": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "address": "fa:16:3e:ea:d2:ac", "network": {"id": "cdd4d2b4-ed84-46e1-8f22-c35e9bb2a79e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1352479680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85953f12c4d7442993b8212939a14c35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e68d1a61-2c49-4777-87c4-5eb73c467ad3", "external-id": "nsx-vlan-transportzone-52", "segmentation_id": 52, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd77fc39a-89", "ovs_interfaceid": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 942.320490] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:d2:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e68d1a61-2c49-4777-87c4-5eb73c467ad3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd77fc39a-89ae-47b6-8770-a620acc4eab3', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 942.333555] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Creating folder: Project (85953f12c4d7442993b8212939a14c35). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.336636] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e404b125-1c98-4dd9-bbdb-2c8748b72d1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.349796] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Created folder: Project (85953f12c4d7442993b8212939a14c35) in parent group-v700823. [ 942.350018] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Creating folder: Instances. Parent ref: group-v701050. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 942.353799] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f666d21-3105-4e47-ac01-0a8ccd88822e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.356298] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508443, 'name': Rename_Task, 'duration_secs': 0.232644} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.359256] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 942.361797] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b4e7f709-be4e-4857-aa69-603f96888d9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.371836] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 942.371836] env[69475]: value = "task-3508448" [ 942.371836] env[69475]: _type = "Task" [ 942.371836] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.378806] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Created folder: Instances in parent group-v701050. [ 942.379239] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.379413] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 942.381159] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-504ebb04-2097-4a57-8688-8ed2abfb11f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.401781] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508448, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.408400] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 942.408400] env[69475]: value = "task-3508449" [ 942.408400] env[69475]: _type = "Task" [ 942.408400] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.420489] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508449, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.468819] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 942.469120] env[69475]: DEBUG nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Received event network-vif-deleted-9aa71b8b-3116-4297-a480-30aa5caf507d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.469517] env[69475]: DEBUG nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Received event network-vif-plugged-d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.469517] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Acquiring lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.469731] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 942.469889] env[69475]: DEBUG oslo_concurrency.lockutils [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" 
"released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 942.470113] env[69475]: DEBUG nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] No waiting events found dispatching network-vif-plugged-d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 942.470314] env[69475]: WARNING nova.compute.manager [req-6ef8e9f8-75fc-48e7-a28a-60330e1e84f5 req-78b1d2dd-e59b-44a1-b4ea-e52a2c778fa9 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Received unexpected event network-vif-plugged-d77fc39a-89ae-47b6-8770-a620acc4eab3 for instance with vm_state building and task_state spawning. [ 942.521948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.609651] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01a8786-3903-479c-b019-c88d5f25b463 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.620225] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b5083b-2b8d-4e67-83ff-2a39998272b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.660300] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-130e528e-f79a-4b1c-b9d9-58a7f61ed012 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.669739] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2c654f5-38d3-4322-bf28-f78c26ede952 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.685444] env[69475]: DEBUG nova.compute.provider_tree [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 942.724593] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508440, 'name': CloneVM_Task, 'duration_secs': 1.472401} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.724988] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created linked-clone VM from snapshot [ 942.726809] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e86a5b9a-13ef-498b-84ce-03e419fdc811 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.744073] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploading image 71a76a24-1a96-4056-b949-89270aaca820 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 942.764192] env[69475]: DEBUG oslo_vmware.api [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Task: {'id': task-3508445, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.340234} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.764470] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.764650] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.764823] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.764989] env[69475]: INFO nova.compute.manager [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Took 1.16 seconds to destroy the instance on the hypervisor. [ 942.765270] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 942.765460] env[69475]: DEBUG nova.compute.manager [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 942.768035] env[69475]: DEBUG nova.network.neutron [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.784780] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 942.784780] env[69475]: value = "vm-701049" [ 942.784780] env[69475]: _type = "VirtualMachine" [ 942.784780] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 942.785117] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f8143509-0a18-4e2b-98f7-26af5bbb91ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.797834] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease: (returnval){ [ 942.797834] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526be8eb-3ea9-4415-65ad-a38db5a6db72" [ 942.797834] env[69475]: _type = "HttpNfcLease" [ 942.797834] env[69475]: } obtained for exporting VM: (result){ [ 942.797834] env[69475]: value = "vm-701049" [ 942.797834] env[69475]: _type = "VirtualMachine" [ 942.797834] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 942.798488] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the lease: (returnval){ [ 942.798488] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526be8eb-3ea9-4415-65ad-a38db5a6db72" [ 942.798488] env[69475]: _type = "HttpNfcLease" [ 942.798488] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 942.811113] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 942.811113] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526be8eb-3ea9-4415-65ad-a38db5a6db72" [ 942.811113] env[69475]: _type = "HttpNfcLease" [ 942.811113] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 942.888572] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508448, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.919341] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508449, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.924081] env[69475]: DEBUG nova.network.neutron [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.224139] env[69475]: ERROR nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [req-c54b83f9-c57a-419a-97b2-b87a7dec616f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c54b83f9-c57a-419a-97b2-b87a7dec616f"}]} [ 943.253677] env[69475]: DEBUG nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 943.260490] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.260490] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9f1ec67e-ed2e-4f0d-8343-8c02860acf8d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.268701] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 943.268701] env[69475]: value = "task-3508451" [ 943.268701] env[69475]: _type = "Task" [ 943.268701] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.275263] env[69475]: DEBUG nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 943.275502] env[69475]: DEBUG nova.compute.provider_tree [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 943.283294] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508451, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.292155] env[69475]: DEBUG nova.compute.manager [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Received event network-changed-d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 943.295078] env[69475]: DEBUG nova.compute.manager [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Refreshing instance network info cache due to event network-changed-d77fc39a-89ae-47b6-8770-a620acc4eab3. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 943.295078] env[69475]: DEBUG oslo_concurrency.lockutils [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] Acquiring lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.295720] env[69475]: DEBUG oslo_concurrency.lockutils [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] Acquired lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.296092] env[69475]: DEBUG nova.network.neutron [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Refreshing network info cache for port d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 943.298634] env[69475]: DEBUG nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 943.313110] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.313110] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526be8eb-3ea9-4415-65ad-a38db5a6db72" [ 943.313110] env[69475]: _type = "HttpNfcLease" [ 943.313110] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 943.313906] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 943.313906] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526be8eb-3ea9-4415-65ad-a38db5a6db72" [ 943.313906] env[69475]: _type = "HttpNfcLease" [ 943.313906] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 943.315115] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e89a97-f837-491f-8059-464706c9ebb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.326017] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 943.326017] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk for reading. 
{{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 943.389982] env[69475]: DEBUG nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 943.405231] env[69475]: DEBUG oslo_vmware.api [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508448, 'name': PowerOnVM_Task, 'duration_secs': 0.855605} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.405534] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 943.405821] env[69475]: INFO nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Took 9.45 seconds to spawn the instance on the hypervisor. [ 943.405947] env[69475]: DEBUG nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 943.406850] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-091d5013-37dc-49f5-b23a-9518df23e301 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.422732] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508449, 'name': CreateVM_Task, 'duration_secs': 0.536748} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.425438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 943.426135] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.426301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.426603] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 943.428728] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 943.429114] env[69475]: INFO nova.compute.manager [-] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Took 2.57 seconds to deallocate network for instance. [ 943.429314] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56a5289e-bc77-475b-989d-8b95ae5b4c8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.432046] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7756b8f-2df5-4186-b627-083a075ef29d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.444699] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 943.444863] env[69475]: ERROR oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk due to incomplete transfer. 
[ 943.445170] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 943.445170] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3757-3889-a256-f49e-b6176688ce48" [ 943.445170] env[69475]: _type = "Task" [ 943.445170] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.446819] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-75ca73fd-e20d-46b3-aac7-a05bb5a2b3f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.458601] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3757-3889-a256-f49e-b6176688ce48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.459872] env[69475]: DEBUG oslo_vmware.rw_handles [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ebd675-5721-5a0c-969d-ff577b416132/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 943.460082] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Uploaded image fc07f6a7-0a66-45fb-9298-51f7f2cb2e41 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 943.462391] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 943.462667] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-38cc04ab-b6ee-4450-9116-fb9ec73e39d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.473690] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 943.473690] env[69475]: value = "task-3508452" [ 943.473690] env[69475]: _type = "Task" [ 943.473690] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.481418] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9dba6603-1249-43a8-9186-d06043d72d5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.488183] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508452, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.711341] env[69475]: DEBUG nova.network.neutron [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.779888] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508451, 'name': PowerOffVM_Task, 'duration_secs': 0.350828} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.783149] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.784464] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a9cbe3b-f211-44b9-b551-6297c7f90d5c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.810426] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e776fcd-9190-489d-9160-8ae6e6b50e07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.851121] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.851121] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07cdf312-b894-4b1d-9b92-0de52cb9e3d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.859711] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 943.859711] env[69475]: value = "task-3508453" [ 943.859711] env[69475]: _type = "Task" [ 943.859711] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.870684] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508453, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.883945] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5167acb5-a828-4f82-8ad6-b450a12bc1cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.892037] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5d42800-96fb-436b-a304-786f33b98f42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.924123] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2426767d-4d8e-43a4-aac4-6a029e7fe1c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.937000] env[69475]: INFO nova.compute.manager [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Took 29.20 seconds to build instance. [ 943.942029] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f18977cf-74ae-48d4-853a-4f115199dd4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.946831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.960145] env[69475]: DEBUG nova.compute.provider_tree [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 943.971340] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3757-3889-a256-f49e-b6176688ce48, 'name': SearchDatastore_Task, 'duration_secs': 0.01927} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.972053] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.972440] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.972558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.972656] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 943.972831] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.976679] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-359571dd-f285-41d1-9fd0-7ad09678ae79 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.985160] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508452, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.988946] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.989155] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.989965] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ecd704-48fa-4e66-b487-c4ad3a1ba339 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.996132] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 943.996132] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b0f6d-19b9-5a33-7983-5f4b82d3a9f3" [ 943.996132] env[69475]: _type = "Task" [ 943.996132] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.007430] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b0f6d-19b9-5a33-7983-5f4b82d3a9f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.199444] env[69475]: DEBUG nova.network.neutron [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updated VIF entry in instance network info cache for port d77fc39a-89ae-47b6-8770-a620acc4eab3. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 944.199904] env[69475]: DEBUG nova.network.neutron [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updating instance_info_cache with network_info: [{"id": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "address": "fa:16:3e:ea:d2:ac", "network": {"id": "cdd4d2b4-ed84-46e1-8f22-c35e9bb2a79e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1352479680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85953f12c4d7442993b8212939a14c35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e68d1a61-2c49-4777-87c4-5eb73c467ad3", "external-id": "nsx-vlan-transportzone-52", "segmentation_id": 52, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd77fc39a-89", "ovs_interfaceid": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 944.214824] env[69475]: INFO nova.compute.manager [-] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Took 1.45 seconds to deallocate network for instance. 
[ 944.372877] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 944.373248] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 944.373522] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.440629] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f0994d01-2dea-4aa3-b195-40beda5733aa tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.723s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.483283] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508452, 'name': Destroy_Task, 'duration_secs': 0.759102} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.483759] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Destroyed the VM [ 944.484197] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 944.484630] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6399056c-ba56-450a-bf9b-83ce869f318f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.493416] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 944.493416] env[69475]: value = "task-3508454" [ 944.493416] env[69475]: _type = "Task" [ 944.493416] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.503744] env[69475]: DEBUG nova.scheduler.client.report [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 944.504423] env[69475]: DEBUG nova.compute.provider_tree [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 117 to 118 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 944.504598] env[69475]: DEBUG nova.compute.provider_tree [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 944.517042] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523b0f6d-19b9-5a33-7983-5f4b82d3a9f3, 'name': SearchDatastore_Task, 'duration_secs': 0.030841} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.518889] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508454, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.518889] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4494c590-ab81-4e50-b5b8-c9da3a70aea5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.527153] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 944.527153] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f27450-6941-c599-f518-d96769308015" [ 944.527153] env[69475]: _type = "Task" [ 944.527153] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.540350] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f27450-6941-c599-f518-d96769308015, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.703732] env[69475]: DEBUG oslo_concurrency.lockutils [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] Releasing lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 944.704085] env[69475]: DEBUG nova.compute.manager [req-550b7656-c229-4815-923c-c09c71ef071a req-f5f742bb-8e8e-4275-ac78-2e8f1286cc0e service nova] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Received event network-vif-deleted-9e1b604b-8b51-4d1d-a716-b433d77aa5a3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 944.722504] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.836225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "78b5496c-f8e2-4681-a36b-50897b0f7325" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.836590] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.837190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 
tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.837527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.837872] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.845032] env[69475]: INFO nova.compute.manager [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Terminating instance [ 944.845032] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "4b17d080-594b-44e7-83aa-ebe0787722d9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.845340] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.845628] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.845880] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.846118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a 
tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.848787] env[69475]: INFO nova.compute.manager [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Terminating instance [ 945.007349] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508454, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.020241] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.953s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.020984] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 945.024674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.095s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.025979] env[69475]: DEBUG nova.objects.instance [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lazy-loading 'resources' on Instance uuid a3ee83aa-f753-49e3-9db2-b1b67d6d211e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 945.039345] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f27450-6941-c599-f518-d96769308015, 'name': SearchDatastore_Task, 'duration_secs': 0.013842} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.039710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 945.040038] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c9b2f701-a73a-4561-b637-62e3ce98a44f/c9b2f701-a73a-4561-b637-62e3ce98a44f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 945.040395] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 945.040658] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 945.040977] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2c390b38-6f7d-4e4d-af68-5462d96845e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.043785] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ee0d4a9-aace-4411-a45b-ce07129b8be8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.053758] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 945.053758] env[69475]: value = "task-3508455" [ 945.053758] env[69475]: _type = "Task" [ 945.053758] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.058662] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 945.058946] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 945.060223] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-457bb8a3-07c3-4b33-82ef-7451010b56ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.067210] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508455, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.071631] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 945.071631] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523bc8ee-3f09-a941-fcf0-c6c67732644f" [ 945.071631] env[69475]: _type = "Task" [ 945.071631] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.084539] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523bc8ee-3f09-a941-fcf0-c6c67732644f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.352794] env[69475]: DEBUG nova.compute.manager [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.353234] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.353881] env[69475]: DEBUG nova.compute.manager [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.354255] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.355099] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-600b132a-acce-4b01-9920-308bc39fd904 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.359053] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6cd4e8a-e2f8-4dd0-ab7c-96619b7bffb7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.364575] env[69475]: DEBUG nova.compute.manager [req-eaf6f9aa-0842-44c2-ae7c-59fb5cdffe8d req-990a5c29-5296-4813-9a48-23691fe11943 service nova] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Received event network-vif-deleted-9e51856c-7355-448c-82fc-e5af23bb0fcf {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 945.368759] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.370917] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25648bcc-aa18-406b-9eb8-9f0f1e95a01e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.372684] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.373232] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2a5a863a-061d-4c64-800b-03e4343ed4f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.380875] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 945.380875] env[69475]: value = "task-3508457" [ 945.380875] env[69475]: _type = "Task" [ 945.380875] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.382628] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 945.382628] env[69475]: value = "task-3508456" [ 945.382628] env[69475]: _type = "Task" [ 945.382628] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.395630] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508457, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.399564] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508456, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.508890] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508454, 'name': RemoveSnapshot_Task, 'duration_secs': 0.709982} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.509229] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 945.509533] env[69475]: DEBUG nova.compute.manager [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 945.510422] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab32809-d3da-4a60-a263-5438c0fc5fe4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.526039] env[69475]: DEBUG nova.compute.utils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 945.527523] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 945.527676] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 945.572357] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508455, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.587834] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523bc8ee-3f09-a941-fcf0-c6c67732644f, 'name': SearchDatastore_Task, 'duration_secs': 0.014812} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.588869] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d7e31bb-526d-48b5-8cf5-10396ae19cc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.599356] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 945.599356] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526d5622-416f-0136-4b32-7a1fa4756937" [ 945.599356] env[69475]: _type = "Task" [ 945.599356] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.610440] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526d5622-416f-0136-4b32-7a1fa4756937, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.627489] env[69475]: DEBUG nova.policy [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82fb5348c4484685ba3d0589310fb68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd25a22195d0c4370a481a242a18f430a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 945.642029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.642029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.642029] env[69475]: INFO nova.compute.manager 
[None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Rebooting instance [ 945.894901] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508457, 'name': PowerOffVM_Task, 'duration_secs': 0.347256} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.895696] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.895935] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.896252] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb343e03-966b-4aee-86dc-c74d354af930 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.906764] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508456, 'name': PowerOffVM_Task, 'duration_secs': 0.26008} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.908043] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.908204] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.908441] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-944171a9-8a9d-49a2-8d0c-c8ccdef957f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.004906] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.005271] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.005663] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleting the datastore file [datastore1] 4b17d080-594b-44e7-83aa-ebe0787722d9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.006027] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e9fd6243-e204-49dd-8ab0-308e1ebdd7f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.018028] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 946.018028] env[69475]: value = "task-3508460" [ 946.018028] env[69475]: _type = "Task" [ 946.018028] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.020519] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.020741] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.020916] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Deleting the datastore file [datastore1] 78b5496c-f8e2-4681-a36b-50897b0f7325 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.021667] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4dd650d-2221-49ed-a053-b866ed72fb1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.029329] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508460, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.033435] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 946.036133] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for the task: (returnval){ [ 946.036133] env[69475]: value = "task-3508461" [ 946.036133] env[69475]: _type = "Task" [ 946.036133] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.036792] env[69475]: INFO nova.compute.manager [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Shelve offloading [ 946.047850] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508461, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.071200] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508455, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.073150] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e4224e-b7c7-4500-9255-1edd021a7b23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.084203] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750419ad-1f62-41d3-bc3c-7c91215f47a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.121951] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-184294b6-e454-40be-a9f0-ba2a79df0676 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.133874] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526d5622-416f-0136-4b32-7a1fa4756937, 'name': SearchDatastore_Task, 'duration_secs': 0.061846} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.134581] env[69475]: DEBUG oslo_concurrency.lockutils [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 946.134853] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
{{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 946.136143] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cbc5135-8b75-409f-ac69-e54aa39af602 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.140078] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4358bc10-d6ba-4551-ab42-ac05b8f79ecf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.158912] env[69475]: DEBUG nova.compute.provider_tree [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.165202] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 946.165202] env[69475]: value = "task-3508462" [ 946.165202] env[69475]: _type = "Task" [ 946.165202] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.165202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.165202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquired lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.165202] env[69475]: DEBUG nova.network.neutron [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.176114] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508462, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.259838] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Successfully created port: 0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 946.530794] env[69475]: DEBUG oslo_vmware.api [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508460, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222143} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.530794] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.530794] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.530794] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.530794] env[69475]: INFO nova.compute.manager [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Took 1.17 seconds to destroy the instance on the hypervisor. [ 946.530794] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.530794] env[69475]: DEBUG nova.compute.manager [-] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.530794] env[69475]: DEBUG nova.network.neutron [-] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.545918] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 946.546421] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2891e21-1206-458c-87cf-c3c7aa3dff71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.556766] env[69475]: DEBUG oslo_vmware.api [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Task: {'id': task-3508461, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242977} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.558433] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 946.558698] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 946.558812] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 946.559011] env[69475]: INFO nova.compute.manager [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Took 1.21 seconds to destroy the instance on the hypervisor. [ 946.559283] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 946.559801] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 946.559801] env[69475]: value = "task-3508463" [ 946.559801] env[69475]: _type = "Task" [ 946.559801] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.560085] env[69475]: DEBUG nova.compute.manager [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 946.560398] env[69475]: DEBUG nova.network.neutron [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 946.577278] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508455, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.579923] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 946.580174] env[69475]: DEBUG nova.compute.manager [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 946.581013] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16bf55dc-4ee1-4b5f-93b1-87edd5a92b89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.588903] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 946.589193] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 946.589298] env[69475]: DEBUG nova.network.neutron [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 946.666811] env[69475]: DEBUG nova.scheduler.client.report 
[None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 946.686486] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508462, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.982387] env[69475]: DEBUG nova.network.neutron [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Updating instance_info_cache with network_info: [{"id": "409b371c-7a12-4772-a463-d5e8bc596b60", "address": "fa:16:3e:2b:c1:7e", "network": {"id": "ed1543e1-b33e-4edc-b541-6a2a167af5e3", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1423926111-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "13c35dc0fd1c42b083d2a3c7070ed230", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap409b371c-7a", "ovs_interfaceid": "409b371c-7a12-4772-a463-d5e8bc596b60", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.048422] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 947.066897] env[69475]: DEBUG nova.compute.manager [req-468d0ba7-e695-4e87-9cc4-48816f9801e7 req-f5fffcb8-0f7e-49cb-b5dc-89b8a5b95dd5 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Received event network-vif-deleted-02e51603-f6e4-43c3-9a38-465fab447405 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.068585] env[69475]: INFO nova.compute.manager [req-468d0ba7-e695-4e87-9cc4-48816f9801e7 req-f5fffcb8-0f7e-49cb-b5dc-89b8a5b95dd5 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Neutron deleted interface 02e51603-f6e4-43c3-9a38-465fab447405; detaching it from the instance and deleting it from the info cache [ 947.068585] env[69475]: DEBUG nova.network.neutron [req-468d0ba7-e695-4e87-9cc4-48816f9801e7 req-f5fffcb8-0f7e-49cb-b5dc-89b8a5b95dd5 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.079317] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508455, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.732311} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.083300] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] c9b2f701-a73a-4561-b637-62e3ce98a44f/c9b2f701-a73a-4561-b637-62e3ce98a44f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 947.083683] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 947.085680] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4737fce-3cd4-4525-aeb1-f3590a2c44ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.090719] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 947.090971] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 947.091152] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 947.091337] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 947.091482] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 947.091629] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 947.091847] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 947.092031] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 947.092220] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 947.092350] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 
947.092525] env[69475]: DEBUG nova.virt.hardware [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 947.096021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5dd355-1f42-4e2b-8ed7-ebf4946cfb0e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.100494] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 947.100494] env[69475]: value = "task-3508464" [ 947.100494] env[69475]: _type = "Task" [ 947.100494] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.109573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be5e0e4-6351-40ac-9ee2-096b2ad723d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.119840] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508464, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.179576] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.186102] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.422s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.187708] env[69475]: INFO nova.compute.claims [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.191563] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508462, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.210370] env[69475]: INFO nova.scheduler.client.report [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleted allocations for instance a3ee83aa-f753-49e3-9db2-b1b67d6d211e [ 947.430823] env[69475]: DEBUG nova.network.neutron [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.485135] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Releasing lock "refresh_cache-d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.488957] env[69475]: DEBUG nova.network.neutron [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updating instance_info_cache with network_info: [{"id": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "address": "fa:16:3e:c4:73:07", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb12c0816-a1", "ovs_interfaceid": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.531792] env[69475]: DEBUG nova.compute.manager [req-fba43806-b9e9-4f11-ac31-a61e68b00108 req-17831857-d5a5-40f9-999c-935150ef44ad service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Received event network-vif-deleted-1176c458-2328-4179-b0d0-cbcea8175e66 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.531989] env[69475]: INFO nova.compute.manager [req-fba43806-b9e9-4f11-ac31-a61e68b00108 req-17831857-d5a5-40f9-999c-935150ef44ad service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Neutron deleted interface 1176c458-2328-4179-b0d0-cbcea8175e66; detaching it from the instance and deleting it from the info cache [ 947.532121] env[69475]: DEBUG nova.network.neutron [req-fba43806-b9e9-4f11-ac31-a61e68b00108 req-17831857-d5a5-40f9-999c-935150ef44ad service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Updating instance_info_cache with network_info: [] {{(pid=69475) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.561610] env[69475]: DEBUG nova.network.neutron [-] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.571202] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-53323b2b-d4c2-47eb-882e-5e3beb7bb457 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.583488] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce93405-5787-417b-9a04-04b7db85a3b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.613038] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508464, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.313669} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.627608] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 947.628491] env[69475]: DEBUG nova.compute.manager [req-468d0ba7-e695-4e87-9cc4-48816f9801e7 req-f5fffcb8-0f7e-49cb-b5dc-89b8a5b95dd5 service nova] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Detach interface failed, port_id=02e51603-f6e4-43c3-9a38-465fab447405, reason: Instance 78b5496c-f8e2-4681-a36b-50897b0f7325 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.629906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f04134-9829-4ebb-91d6-70cea303757f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.654168] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] c9b2f701-a73a-4561-b637-62e3ce98a44f/c9b2f701-a73a-4561-b637-62e3ce98a44f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.654508] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c2fa32a-cec3-43f6-bb03-cb3d3f4796ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.677039] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 947.677039] env[69475]: value = "task-3508465" [ 947.677039] env[69475]: _type = "Task" [ 947.677039] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.683255] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508462, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.244834} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.683885] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. [ 947.684690] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98aa74d2-d245-48b8-be3e-6ab46525c5a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.690556] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508465, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.715521] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 947.716357] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61a79bfe-fbc1-479e-901e-c992f4955a09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.732159] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef51c9e5-87ed-45b7-8827-af4b92a727a2 tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "a3ee83aa-f753-49e3-9db2-b1b67d6d211e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.826s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.740617] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 947.740617] env[69475]: value = "task-3508466" [ 947.740617] env[69475]: _type = "Task" [ 947.740617] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 947.751299] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508466, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.933523] env[69475]: INFO nova.compute.manager [-] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Took 1.37 seconds to deallocate network for instance. [ 947.991382] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 947.993518] env[69475]: DEBUG nova.compute.manager [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 947.994448] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe329f1e-6fbd-4879-bad8-8afc59bd9613 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.010350] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.010350] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.010687] env[69475]: DEBUG nova.objects.instance [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.034788] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff405d75-9168-4daf-bdc5-7ee70ca7bee6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.047444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe8348be-ed35-4e4e-9e6b-24ac82183fa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.064960] env[69475]: INFO nova.compute.manager [-] [instance: 
4b17d080-594b-44e7-83aa-ebe0787722d9] Took 1.54 seconds to deallocate network for instance. [ 948.092496] env[69475]: DEBUG nova.compute.manager [req-fba43806-b9e9-4f11-ac31-a61e68b00108 req-17831857-d5a5-40f9-999c-935150ef44ad service nova] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Detach interface failed, port_id=1176c458-2328-4179-b0d0-cbcea8175e66, reason: Instance 4b17d080-594b-44e7-83aa-ebe0787722d9 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 948.188871] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508465, 'name': ReconfigVM_Task, 'duration_secs': 0.44978} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.189374] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Reconfigured VM instance instance-00000052 to attach disk [datastore1] c9b2f701-a73a-4561-b637-62e3ce98a44f/c9b2f701-a73a-4561-b637-62e3ce98a44f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.189841] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c8593643-1aec-47af-994d-997f2d9b6424 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.199595] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 948.199595] env[69475]: value = "task-3508467" [ 948.199595] env[69475]: _type = "Task" [ 948.199595] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.208677] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508467, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.252384] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508466, 'name': ReconfigVM_Task, 'duration_secs': 0.507241} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.252712] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 948.253652] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303bb22a-189e-4ce2-92d7-8bc33b54cee7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.286883] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1378b387-0604-4799-95d7-87007283203c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.303553] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 948.303553] env[69475]: value = "task-3508468" [ 948.303553] env[69475]: _type = "Task" [ 948.303553] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.312542] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508468, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.442077] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.573123] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.612052] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 948.614023] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff93c29c-a4c9-4479-b968-4263d405df6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.617845] env[69475]: DEBUG nova.objects.instance [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.627174] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 948.627592] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4c887aec-ac10-48ec-a492-38dccf481c5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.636805] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Successfully updated port: 0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 948.696676] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 948.696955] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Deleting 
contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 948.697280] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore2] 78430e6a-b0a3-400b-91c4-effea838274a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 948.697578] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d10ebb51-155c-4a2c-b8a7-880d9818862b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.710142] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 948.710142] env[69475]: value = "task-3508470" [ 948.710142] env[69475]: _type = "Task" [ 948.710142] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.717240] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508467, 'name': Rename_Task, 'duration_secs': 0.225347} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.717886] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.718153] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d7d8d46-ad7a-4c8c-9830-92f1e580f0de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.726072] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.731084] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 948.731084] env[69475]: value = "task-3508471" [ 948.731084] env[69475]: _type = "Task" [ 948.731084] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.742629] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508471, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 948.756170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91726aea-8590-4f8f-8274-46335f853c6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.764673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9e342ff-dbff-41b8-9f96-63af9bba9eb5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.797109] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c3c4fdc-dfc3-4d43-902e-7aa3e3264af0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.808533] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6652fc53-7731-425f-8f43-3cfeec4c6f7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.825596] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508468, 'name': ReconfigVM_Task, 'duration_secs': 0.277893} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 948.826067] env[69475]: DEBUG nova.compute.provider_tree [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 948.827516] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 948.827795] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5303d39a-4d7e-4e8b-a8d4-bafb37090b9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.837456] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 948.837456] env[69475]: value = "task-3508472" [ 948.837456] env[69475]: _type = "Task" [ 948.837456] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 948.847374] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508472, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.013877] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4398665d-6228-4194-b7fb-f985c054077a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.025020] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Doing hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 949.025020] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-8b8179c2-1901-4da7-b114-ea2d72f1b535 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.034020] env[69475]: DEBUG oslo_vmware.api [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 949.034020] env[69475]: value = "task-3508473" [ 949.034020] env[69475]: _type = "Task" [ 949.034020] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.041727] env[69475]: DEBUG oslo_vmware.api [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508473, 'name': ResetVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.121049] env[69475]: DEBUG nova.objects.base [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 949.121358] env[69475]: DEBUG nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 949.139766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.140039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.140039] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Building network info cache for instance 
{{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.178057] env[69475]: DEBUG nova.policy [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 949.222226] env[69475]: DEBUG oslo_vmware.api [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.348587} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.222461] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 949.222639] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 949.222810] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 949.244253] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508471, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.245392] env[69475]: INFO nova.scheduler.client.report [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance 78430e6a-b0a3-400b-91c4-effea838274a [ 949.331774] env[69475]: DEBUG nova.scheduler.client.report [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.351747] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508472, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.388646] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "86647493-8b2c-46bd-94d3-c973e843f778" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.388949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.389246] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "86647493-8b2c-46bd-94d3-c973e843f778-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.389422] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.389610] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.391988] env[69475]: INFO nova.compute.manager [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Terminating instance [ 949.543799] env[69475]: DEBUG oslo_vmware.api [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508473, 'name': ResetVM_Task, 'duration_secs': 0.111856} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.544146] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Did hard reboot of VM {{(pid=69475) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 949.544345] env[69475]: DEBUG nova.compute.manager [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.545196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61cc5990-474d-41aa-8b39-129f051c7c34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.674630] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.745059] env[69475]: DEBUG oslo_vmware.api [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508471, 'name': PowerOnVM_Task, 'duration_secs': 0.732742} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.748054] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.748286] env[69475]: INFO nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Took 10.89 seconds to spawn the instance on the hypervisor. 
[ 949.748465] env[69475]: DEBUG nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.749466] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.750243] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0132f15a-358e-4406-8bf2-c20bdad99818 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.835359] env[69475]: DEBUG nova.network.neutron [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.838600] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.652s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.839198] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 949.845095] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.237s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.845095] env[69475]: INFO nova.compute.claims [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 949.858249] env[69475]: DEBUG oslo_vmware.api [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508472, 'name': PowerOnVM_Task, 'duration_secs': 0.693081} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 949.858512] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 949.861772] env[69475]: DEBUG nova.compute.manager [None req-13471b3a-4e86-4136-a2c6-4325655c552a tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 949.862810] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5fa11e-e95d-448a-bad8-4cfe71a9f037 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.868197] env[69475]: DEBUG nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Successfully created port: eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 949.897304] env[69475]: DEBUG nova.compute.manager [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 949.897304] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 949.900105] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3608c637-04f5-45a4-b056-508921a4eac4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.910034] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 949.910294] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e42242ee-e90b-4968-b726-1991500a7ca1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.917929] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 949.917929] env[69475]: value = "task-3508474" [ 949.917929] env[69475]: _type = "Task" [ 949.917929] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.928091] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508474, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.059980] env[69475]: DEBUG oslo_concurrency.lockutils [None req-153f1af3-a501-4b01-b43d-0404a9e632bd tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.419s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.271671] env[69475]: INFO nova.compute.manager [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Took 33.96 seconds to build instance. 
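Annotation: the "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" lines above (for "compute_resources" and the per-instance locks) are emitted by oslo_concurrency.lockutils around synchronized sections. A minimal sketch of how a caller produces that logging, assuming a hypothetical update_usage function guarded by the same 'compute_resources' lock name; this is an illustration of the lock helper, not the resource tracker's actual code.

    # Minimal sketch of the lock pattern behind the "Acquiring lock ..." lines.
    # The lock name mirrors the log; `update_usage` and `tracker` are hypothetical.
    from oslo_concurrency import lockutils

    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage(tracker, instance):
        # The body runs only while the named lock is held; entering and leaving
        # the wrapper is what produces the "acquired/released ... held Ns" lines.
        tracker.update(instance)

    # The same lock can also be taken explicitly as a context manager:
    def with_explicit_lock(tracker, instance):
        with lockutils.lock('compute_resources', 'nova-'):
            tracker.update(instance)
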
[ 950.338958] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.339216] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance network_info: |[{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 950.339658] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:27:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0241fad0-a699-4ab6-8665-37a808867cd9', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 950.347333] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating folder: Project (d25a22195d0c4370a481a242a18f430a). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.347678] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e0eab38-7131-496b-8c53-1935be04334f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.353064] env[69475]: DEBUG nova.compute.utils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 950.354375] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 950.354535] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 950.371699] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created folder: Project (d25a22195d0c4370a481a242a18f430a) in parent group-v700823. [ 950.371923] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating folder: Instances. Parent ref: group-v701053. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 950.373041] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7499ae22-8490-4839-a0d2-ad18894a68dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.387496] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created folder: Instances in parent group-v701053. [ 950.387834] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 950.388095] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 950.388517] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a59dac19-a228-4f2f-ae75-cebd4c0d1ac9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.406307] env[69475]: DEBUG nova.policy [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 950.423065] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 950.423065] env[69475]: value = "task-3508477" [ 950.423065] env[69475]: _type = "Task" [ 950.423065] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.431041] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508474, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.434270] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508477, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.511834] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.512107] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.512691] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.512691] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.512691] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] No waiting events found dispatching network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.512979] env[69475]: WARNING nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received unexpected event network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 for instance with vm_state building and task_state spawning. 
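Annotation: the "Updating instance_info_cache with network_info" entry above carries the full VIF description (port id, MAC, subnets, fixed IPs) as a JSON-compatible list. A minimal sketch of pulling the MAC and fixed addresses out of such a blob once it has been copied from the log; fixed_addresses and blob are illustrative names, not part of Nova.

    # Minimal sketch: extracting the MAC and fixed IPs from a network_info blob
    # as it appears in the "Updating instance_info_cache ..." lines.
    # `blob` is assumed to be the JSON-like list copied out of the log.
    import json

    def fixed_addresses(blob: str):
        vifs = json.loads(blob)
        results = []
        for vif in vifs:
            mac = vif.get('address')                      # e.g. fa:16:3e:9e:27:a0
            for subnet in vif.get('network', {}).get('subnets', []):
                for ip in subnet.get('ips', []):
                    if ip.get('type') == 'fixed':
                        results.append((mac, ip['address']))  # e.g. 192.168.128.7
        return results
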
[ 950.512979] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received event network-vif-unplugged-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.513203] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.513471] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.513679] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.513880] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] No waiting events found dispatching network-vif-unplugged-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.514102] env[69475]: WARNING nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received unexpected event network-vif-unplugged-b12c0816-a102-4eeb-b8bd-bc03636b077f for instance with vm_state shelved_offloaded and task_state None. [ 950.514284] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Received event network-changed-b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.514467] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Refreshing instance network info cache due to event network-changed-b12c0816-a102-4eeb-b8bd-bc03636b077f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 950.514670] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquiring lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.514812] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquired lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 950.514969] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Refreshing network info cache for port b12c0816-a102-4eeb-b8bd-bc03636b077f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 950.698836] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Successfully created port: 325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 950.774354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-821d2214-56d6-4af2-876d-33b44cb12b25 tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.476s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.861640] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 950.936334] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508474, 'name': PowerOffVM_Task, 'duration_secs': 0.583293} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.937203] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 950.937538] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 950.937950] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e521098-3f1b-4e9f-a130-a4b2c4e013f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.944159] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508477, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.028449] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 951.028690] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 951.028900] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleting the datastore file [datastore1] 86647493-8b2c-46bd-94d3-c973e843f778 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 951.029248] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7872a9a8-1f9f-44da-8ea5-a0214aff7c80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.045225] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for the task: (returnval){ [ 951.045225] env[69475]: value = "task-3508479" [ 951.045225] env[69475]: _type = "Task" [ 951.045225] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.058944] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508479, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.349367] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c019232b-1db7-4557-a74b-3125eee3c64a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.358747] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5803e8d3-944a-4a86-87cd-463ca7ae2d10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.406952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de23e2d3-fcda-4d71-a39c-b253133c8795 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.417629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7491aeee-b031-49c9-9141-a9b9c35bb3c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.438631] env[69475]: DEBUG nova.compute.provider_tree [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.447157] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508477, 'name': CreateVM_Task, 'duration_secs': 0.581387} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.447157] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.447499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.447685] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.448010] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 951.448742] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a04b98f7-03d0-4bab-9fa5-dbf1798846e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
951.456074] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 951.456074] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5240e349-a87c-00c6-33ac-07e77489a6c3" [ 951.456074] env[69475]: _type = "Task" [ 951.456074] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.466243] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5240e349-a87c-00c6-33ac-07e77489a6c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.490519] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updated VIF entry in instance network info cache for port b12c0816-a102-4eeb-b8bd-bc03636b077f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 951.490946] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updating instance_info_cache with network_info: [{"id": "b12c0816-a102-4eeb-b8bd-bc03636b077f", "address": "fa:16:3e:c4:73:07", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": null, "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapb12c0816-a1", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.560506] env[69475]: DEBUG oslo_vmware.api [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Task: {'id': task-3508479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.443163} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.560969] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 951.561285] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 951.561575] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 951.561828] env[69475]: INFO nova.compute.manager [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Took 1.67 seconds to destroy the instance on the hypervisor. [ 951.562201] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.562477] env[69475]: DEBUG nova.compute.manager [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 951.562617] env[69475]: DEBUG nova.network.neutron [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 951.767485] env[69475]: DEBUG nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Successfully updated port: eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 951.906208] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 951.934910] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 951.935789] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 951.935991] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 951.936256] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 951.936509] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 951.936649] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 951.936910] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 951.937134] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 951.938092] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] 
Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 951.938092] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 951.938092] env[69475]: DEBUG nova.virt.hardware [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 951.939149] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae97540-7c88-4fb8-a76f-7771a067d7c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.944145] env[69475]: DEBUG nova.scheduler.client.report [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 951.954405] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b353263-dc43-4112-a986-48477710efa3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.982948] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5240e349-a87c-00c6-33ac-07e77489a6c3, 'name': SearchDatastore_Task, 'duration_secs': 0.027541} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.983574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.983683] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.983933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.984088] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.984263] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.984543] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26333e2e-3720-41a3-aeda-266c9cf6e486 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.994019] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Releasing lock "refresh_cache-78430e6a-b0a3-400b-91c4-effea838274a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.994330] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.994498] env[69475]: DEBUG nova.compute.manager [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing instance network info cache due to event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 951.994712] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.994858] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.995011] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.997318] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.997553] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.998584] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62217553-bd11-47b7-8366-7e445cafe04d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.007708] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 952.007708] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aece3b-25e7-2213-4036-25f7c3dadce7" [ 952.007708] env[69475]: _type = "Task" [ 952.007708] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.019253] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aece3b-25e7-2213-4036-25f7c3dadce7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.236680] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 952.237672] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ccdb49-a1ec-4ecd-9a5d-13072be8dff7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.246742] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 952.246742] env[69475]: ERROR oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk due to incomplete transfer. [ 952.246742] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-caa2e8e0-a355-445f-8b9f-de5b39b27536 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.255413] env[69475]: DEBUG oslo_vmware.rw_handles [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52d6a5e0-7f0f-30bd-6699-7e142894ab1b/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 952.255630] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploaded image 71a76a24-1a96-4056-b949-89270aaca820 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 952.257588] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 952.257896] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-581a0f2d-c90c-4326-bd5b-8f90b138c1e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.265471] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 952.265471] env[69475]: value = "task-3508480" [ 952.265471] env[69475]: _type = "Task" [ 952.265471] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.269869] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.270107] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.270291] env[69475]: DEBUG nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 952.278306] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508480, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.390321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 952.411174] env[69475]: DEBUG nova.network.neutron [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.450045] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.607s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 952.450376] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 952.453322] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.255s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 952.455412] env[69475]: INFO nova.compute.claims [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 952.459501] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Successfully updated port: 325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 952.520272] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aece3b-25e7-2213-4036-25f7c3dadce7, 'name': SearchDatastore_Task, 'duration_secs': 0.012997} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.521111] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa493975-aa55-490b-9ed4-238b09d71aa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.527520] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 952.527520] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5242ecaf-5036-a8cf-b31a-a083a7c9d991" [ 952.527520] env[69475]: _type = "Task" [ 952.527520] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.540755] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5242ecaf-5036-a8cf-b31a-a083a7c9d991, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.734019] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updated VIF entry in instance network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.734373] env[69475]: DEBUG nova.network.neutron [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.778357] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508480, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.810351] env[69475]: WARNING nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. ignoring it [ 952.917955] env[69475]: INFO nova.compute.manager [-] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Took 1.36 seconds to deallocate network for instance. [ 952.965206] env[69475]: DEBUG nova.compute.utils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 952.970665] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 952.970665] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 952.972803] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.972803] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.972803] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 953.032788] env[69475]: DEBUG nova.policy [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb93c2f0a3554be8b25cde370a4083ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de2b24bdabce45a7884bdce4ed781c79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 953.041764] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5242ecaf-5036-a8cf-b31a-a083a7c9d991, 'name': SearchDatastore_Task, 'duration_secs': 0.016889} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.042183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.042546] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 953.042899] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dfcfb47c-fab5-4e5c-a91c-a0458a2a4b21 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.057701] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 953.057701] env[69475]: value = "task-3508481" [ 953.057701] env[69475]: _type = "Task" [ 953.057701] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.066265] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508481, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.108020] env[69475]: DEBUG nova.compute.manager [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Received event network-changed-d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.108020] env[69475]: DEBUG nova.compute.manager [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Refreshing instance network info cache due to event network-changed-d77fc39a-89ae-47b6-8770-a620acc4eab3. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 953.108020] env[69475]: DEBUG oslo_concurrency.lockutils [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] Acquiring lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.108020] env[69475]: DEBUG oslo_concurrency.lockutils [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] Acquired lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.108020] env[69475]: DEBUG nova.network.neutron [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Refreshing network info cache for port d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 953.131743] env[69475]: DEBUG nova.network.neutron [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb5ab964-44c1-4189-9805-f3c80abb01ca", "address": "fa:16:3e:72:75:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", 
"segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5ab964-44", "ovs_interfaceid": "eb5ab964-44c1-4189-9805-f3c80abb01ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.145640] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.145863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.146033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.146223] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.146391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.149324] env[69475]: INFO nova.compute.manager [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Terminating instance [ 953.237161] env[69475]: DEBUG oslo_concurrency.lockutils [req-17b4ed6a-f814-4a08-a249-46e74aab3164 req-76e12865-c80b-4904-b329-d8c12da0223a service nova] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.276858] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508480, 'name': 
Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.430949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.463616] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Successfully created port: de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.475444] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 953.513579] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.581314] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508481, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.635653] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 953.636646] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.636874] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.639887] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d022cca0-66c8-42c3-a451-44e26c17f467 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.671550] env[69475]: DEBUG nova.compute.manager [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 953.671787] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 953.672805] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 953.672805] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 953.673141] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 953.673141] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 953.673536] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 953.673536] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 953.673724] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 953.673789] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 
1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 953.673924] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 953.674146] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 953.674352] env[69475]: DEBUG nova.virt.hardware [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 953.680857] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfiguring VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 953.690762] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ddfc11-f9d4-4847-9cf7-3e8d5b81f37a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.693975] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-316ef39c-896f-4dfa-856f-3cd21dd897aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.708245] env[69475]: DEBUG nova.network.neutron [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Updating instance_info_cache with network_info: [{"id": "325d7757-39de-4455-954e-feb4a1be8355", "address": "fa:16:3e:de:79:fa", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap325d7757-39", "ovs_interfaceid": "325d7757-39de-4455-954e-feb4a1be8355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.719550] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.719550] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2493f7fa-36d5-47f1-9143-1ffc1ef60195 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.720851] env[69475]: DEBUG oslo_vmware.api [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 953.720851] env[69475]: value = "task-3508482" [ 953.720851] env[69475]: _type = "Task" [ 953.720851] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.730713] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 953.730713] env[69475]: value = "task-3508483" [ 953.730713] env[69475]: _type = "Task" [ 953.730713] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.734256] env[69475]: DEBUG oslo_vmware.api [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508482, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.745888] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508483, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.778737] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508480, 'name': Destroy_Task, 'duration_secs': 1.100914} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.779069] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroyed the VM [ 953.779260] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 953.779538] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-71afb08a-eaa5-4a8d-a0c9-b7bac591a70d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.788379] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 953.788379] env[69475]: value = "task-3508484" [ 953.788379] env[69475]: _type = "Task" [ 953.788379] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.800363] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508484, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.893937] env[69475]: DEBUG nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-vif-plugged-eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.894200] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.894473] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.894761] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.894899] env[69475]: DEBUG nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 
req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] No waiting events found dispatching network-vif-plugged-eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.895056] env[69475]: WARNING nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received unexpected event network-vif-plugged-eb5ab964-44c1-4189-9805-f3c80abb01ca for instance with vm_state active and task_state None. [ 953.895243] env[69475]: DEBUG nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-changed-eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.895397] env[69475]: DEBUG nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing instance network info cache due to event network-changed-eb5ab964-44c1-4189-9805-f3c80abb01ca. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 953.895583] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.895719] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 953.895892] env[69475]: DEBUG nova.network.neutron [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Refreshing network info cache for port eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 954.028042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c573ae-94dd-4dbf-a8cd-d3d2ebbcdc18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.036109] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6905c462-7fb7-40fa-aa3b-4920316c656e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.080305] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0256946e-0e8c-4da4-83bb-47cfc487d085 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.089337] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.843788} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.091620] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 954.091848] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 954.092422] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-24078794-f6a2-4076-be3d-dfdda0c04050 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.095183] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d4d7d8d-ba4f-489b-abb6-e86407a81e63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.111661] env[69475]: DEBUG nova.compute.provider_tree [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.114478] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 954.114478] env[69475]: value = "task-3508485" [ 954.114478] env[69475]: _type = "Task" [ 954.114478] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.124862] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508485, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.125835] env[69475]: DEBUG nova.network.neutron [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updated VIF entry in instance network info cache for port d77fc39a-89ae-47b6-8770-a620acc4eab3. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.126273] env[69475]: DEBUG nova.network.neutron [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updating instance_info_cache with network_info: [{"id": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "address": "fa:16:3e:ea:d2:ac", "network": {"id": "cdd4d2b4-ed84-46e1-8f22-c35e9bb2a79e", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-1352479680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.174", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "85953f12c4d7442993b8212939a14c35", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e68d1a61-2c49-4777-87c4-5eb73c467ad3", "external-id": "nsx-vlan-transportzone-52", "segmentation_id": 52, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd77fc39a-89", "ovs_interfaceid": "d77fc39a-89ae-47b6-8770-a620acc4eab3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.211410] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.211730] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Instance network_info: |[{"id": "325d7757-39de-4455-954e-feb4a1be8355", "address": "fa:16:3e:de:79:fa", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap325d7757-39", "ovs_interfaceid": "325d7757-39de-4455-954e-feb4a1be8355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 954.212247] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:79:fa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '325d7757-39de-4455-954e-feb4a1be8355', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 954.220432] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating folder: Project (52941494ff1643f6bb75cc1320a86b88). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.220779] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cf0ac13-2c78-4f9c-b871-91b47216d560 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.232055] env[69475]: DEBUG oslo_vmware.api [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508482, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.238807] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created folder: Project (52941494ff1643f6bb75cc1320a86b88) in parent group-v700823. [ 954.239062] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating folder: Instances. Parent ref: group-v701056. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 954.239666] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-443a503b-bd57-427a-a9a9-7d5b2d1e3cc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.244759] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508483, 'name': PowerOffVM_Task, 'duration_secs': 0.467335} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.244975] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 954.245221] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 954.245468] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cda6451c-d3f9-4f98-9fcf-f5e342229805 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.257445] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created folder: Instances in parent group-v701056. [ 954.257690] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.257869] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 954.258096] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-410bab83-6c37-4b38-b955-78ef6db19d6d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.280542] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 954.280542] env[69475]: value = "task-3508489" [ 954.280542] env[69475]: _type = "Task" [ 954.280542] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.288868] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508489, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.298131] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508484, 'name': RemoveSnapshot_Task} progress is 17%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.325675] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 954.325914] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 954.326114] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Deleting the datastore file [datastore2] d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 954.326399] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fd6c421-aa37-4537-a1b7-4e8c05c394d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.334704] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for the task: (returnval){ [ 954.334704] env[69475]: value = "task-3508490" [ 954.334704] env[69475]: _type = "Task" [ 954.334704] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.343830] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.489420] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 954.538013] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.538337] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 954.538525] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 954.538745] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 954.538902] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 954.539140] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 954.539377] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 954.539599] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 954.539802] env[69475]: DEBUG nova.virt.hardware [None 
req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 954.539989] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 954.540192] env[69475]: DEBUG nova.virt.hardware [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 954.541731] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94e96d70-d6ea-4cfb-b6dc-bc261497fee2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.553412] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d329362-e43a-4b40-8bbf-068c0a4997ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.616623] env[69475]: DEBUG nova.scheduler.client.report [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 954.629471] env[69475]: DEBUG oslo_concurrency.lockutils [req-e13124ea-21dd-4d0d-8d95-df8a831fde8b req-637abc62-31b4-46d4-aa52-846d051201d2 service nova] Releasing lock "refresh_cache-c9b2f701-a73a-4561-b637-62e3ce98a44f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.629913] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093805} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.630201] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 954.631057] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64fcff2a-2be8-4746-a1cb-eda5db849d96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.655287] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.655966] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f000561-7654-4014-8a00-46bc24ea22a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.678572] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 954.678572] env[69475]: value = "task-3508491" [ 954.678572] env[69475]: _type = "Task" [ 954.678572] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.687467] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508491, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.718331] env[69475]: DEBUG nova.network.neutron [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updated VIF entry in instance network info cache for port eb5ab964-44c1-4189-9805-f3c80abb01ca. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 954.718764] env[69475]: DEBUG nova.network.neutron [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "eb5ab964-44c1-4189-9805-f3c80abb01ca", "address": "fa:16:3e:72:75:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeb5ab964-44", "ovs_interfaceid": "eb5ab964-44c1-4189-9805-f3c80abb01ca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.730660] env[69475]: DEBUG oslo_vmware.api [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508482, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.791589] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508489, 'name': CreateVM_Task, 'duration_secs': 0.374611} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.794896] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 954.795824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.795824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.796291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 954.796745] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1114482f-a51e-42dc-92c6-1dad4673f8ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.801336] env[69475]: DEBUG oslo_vmware.api [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508484, 'name': RemoveSnapshot_Task, 'duration_secs': 0.752883} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.801870] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 954.802113] env[69475]: INFO nova.compute.manager [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 16.19 seconds to snapshot the instance on the hypervisor. [ 954.805416] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 954.805416] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ba061-e183-452b-e98c-14c408859e88" [ 954.805416] env[69475]: _type = "Task" [ 954.805416] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.813091] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ba061-e183-452b-e98c-14c408859e88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.846927] env[69475]: DEBUG oslo_vmware.api [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Task: {'id': task-3508490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.389438} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.847189] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.847371] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 954.847547] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 954.847717] env[69475]: INFO nova.compute.manager [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Took 1.18 seconds to destroy the instance on the hypervisor. [ 954.847951] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 954.848155] env[69475]: DEBUG nova.compute.manager [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 954.848291] env[69475]: DEBUG nova.network.neutron [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 955.125211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.125857] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 955.128953] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 19.590s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 955.190228] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508491, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.222385] env[69475]: DEBUG oslo_concurrency.lockutils [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.222514] env[69475]: DEBUG nova.compute.manager [req-31010a46-137d-45fb-b3ab-b524ea439189 req-674d9597-8a36-476f-905a-5ec3393cc478 service nova] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Received event network-vif-deleted-7ca686e9-6693-4490-aabc-712796a8fe04 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 955.235252] env[69475]: DEBUG oslo_vmware.api [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508482, 'name': ReconfigVM_Task, 'duration_secs': 1.444278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.235760] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.235971] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfigured VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 955.321510] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529ba061-e183-452b-e98c-14c408859e88, 'name': SearchDatastore_Task, 'duration_secs': 0.03435} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.321811] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.322056] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 955.322295] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.322439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.322616] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 955.322883] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-55095bc9-483d-46a0-8881-31ce88002bf9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.332519] env[69475]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 955.332669] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 955.333471] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66bd82eb-c1e8-43eb-a107-7cddcef41e64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.341644] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 955.341644] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f72730-34a1-5f77-d0df-39334dd92ee0" [ 955.341644] env[69475]: _type = "Task" [ 955.341644] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.346591] env[69475]: DEBUG nova.compute.manager [None req-a6a32018-f67d-4a63-87cf-d1cda64e40e9 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Found 1 images (rotation: 2) {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 955.353074] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f72730-34a1-5f77-d0df-39334dd92ee0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.500291] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Successfully updated port: de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.635146] env[69475]: DEBUG nova.compute.utils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 955.639924] env[69475]: INFO nova.compute.claims [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.642975] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 955.642975] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 955.680896] env[69475]: DEBUG nova.network.neutron [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.692063] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508491, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.694309] env[69475]: DEBUG nova.policy [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11c9c75b1984423f860daec9827e7ce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67d27343d8c04fc9a2bed7a764f6cf82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 955.740097] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0723ca4c-9263-4c8e-bbeb-cbaa869e7ad3 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.730s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.853592] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f72730-34a1-5f77-d0df-39334dd92ee0, 'name': SearchDatastore_Task, 'duration_secs': 0.013613} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.854499] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e05ed0f-5629-4d16-b816-d9140dd35a27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.862390] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 955.862390] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e78638-105e-e4d2-176b-83458205b9e3" [ 955.862390] env[69475]: _type = "Task" [ 955.862390] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.872427] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e78638-105e-e4d2-176b-83458205b9e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.006896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.006896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.006896] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 956.113258] env[69475]: DEBUG nova.compute.manager [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Received event network-vif-plugged-325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.113439] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Acquiring lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.113644] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.113814] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.114010] env[69475]: DEBUG nova.compute.manager [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] No waiting events found dispatching network-vif-plugged-325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 956.114350] env[69475]: WARNING nova.compute.manager [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Received unexpected event network-vif-plugged-325d7757-39de-4455-954e-feb4a1be8355 for instance with vm_state building and task_state spawning. [ 956.114507] env[69475]: DEBUG nova.compute.manager [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Received event network-changed-325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.114661] env[69475]: DEBUG nova.compute.manager [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Refreshing instance network info cache due to event network-changed-325d7757-39de-4455-954e-feb4a1be8355. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 956.114844] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Acquiring lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.114979] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Acquired lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.115156] env[69475]: DEBUG nova.network.neutron [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Refreshing network info cache for port 325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.141940] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Successfully created port: f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 956.144460] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 956.148419] env[69475]: INFO nova.compute.resource_tracker [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating resource usage from migration ffcfa08f-8f32-497c-b2ed-6898dd5bbd21 [ 956.187226] env[69475]: INFO nova.compute.manager [-] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Took 1.34 seconds to deallocate network for instance. 
[ 956.198017] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508491, 'name': ReconfigVM_Task, 'duration_secs': 1.049787} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.198017] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Reconfigured VM instance instance-00000053 to attach disk [datastore2] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 956.198413] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0bbfe102-ab74-4bd8-aa79-e26d5ce4d1e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.207533] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 956.207533] env[69475]: value = "task-3508492" [ 956.207533] env[69475]: _type = "Task" [ 956.207533] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.221847] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508492, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.385395] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e78638-105e-e4d2-176b-83458205b9e3, 'name': SearchDatastore_Task, 'duration_secs': 0.017234} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.390808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.391741] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4066a18f-acc5-49b5-941c-0711f29bdcd2/4066a18f-acc5-49b5-941c-0711f29bdcd2.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.392423] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9396231b-4f03-440a-b8bb-3afba032ac7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.402534] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 956.402534] env[69475]: value = "task-3508493" [ 956.402534] env[69475]: _type = "Task" [ 956.402534] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.419508] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508493, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.554321] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 956.699444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.700249] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81380012-a3f3-4448-b094-f00d28d0423e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.726034] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049db3f9-cd8e-461f-a9e5-c47de3fe9d47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.733180] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508492, 'name': Rename_Task, 'duration_secs': 0.205426} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.733561] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.734351] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-672916fe-264b-4aaf-8529-2a72ddea1ed1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.766651] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c03b34f1-1366-4005-87f4-c41537928c50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.769101] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 956.769101] env[69475]: value = "task-3508494" [ 956.769101] env[69475]: _type = "Task" [ 956.769101] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.776771] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da34eefc-e3ef-457f-97e7-59da1b4ffdc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.786018] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508494, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.797443] env[69475]: DEBUG nova.compute.provider_tree [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 956.895993] env[69475]: DEBUG nova.network.neutron [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updating instance_info_cache with network_info: [{"id": "de52f276-c28b-45f5-8248-9019b9765828", "address": "fa:16:3e:10:a6:34", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde52f276-c2", "ovs_interfaceid": "de52f276-c28b-45f5-8248-9019b9765828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.907110] env[69475]: DEBUG nova.network.neutron [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Updated VIF entry in instance network info cache for port 325d7757-39de-4455-954e-feb4a1be8355. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 956.907473] env[69475]: DEBUG nova.network.neutron [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Updating instance_info_cache with network_info: [{"id": "325d7757-39de-4455-954e-feb4a1be8355", "address": "fa:16:3e:de:79:fa", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap325d7757-39", "ovs_interfaceid": "325d7757-39de-4455-954e-feb4a1be8355", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.919130] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508493, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482624} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.919985] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4066a18f-acc5-49b5-941c-0711f29bdcd2/4066a18f-acc5-49b5-941c-0711f29bdcd2.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.919985] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.920502] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0fc3204c-8918-4c78-86b0-c63c818ef30b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.932116] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 956.932116] env[69475]: value = "task-3508495" [ 956.932116] env[69475]: _type = "Task" [ 956.932116] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.943689] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508495, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.161014] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 957.194407] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 957.194605] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 957.194766] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 957.194948] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 957.195112] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 957.195253] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 957.195474] env[69475]: 
DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 957.195686] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 957.195870] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 957.196051] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 957.196302] env[69475]: DEBUG nova.virt.hardware [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 957.197296] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ebb863f-03d4-4261-a314-304aba12fbfe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.206815] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927dcd6a-716f-49bf-b3f3-d856472dee82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.283641] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508494, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.303135] env[69475]: DEBUG nova.scheduler.client.report [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 957.399437] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.399801] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance network_info: |[{"id": "de52f276-c28b-45f5-8248-9019b9765828", "address": "fa:16:3e:10:a6:34", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde52f276-c2", "ovs_interfaceid": "de52f276-c28b-45f5-8248-9019b9765828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 957.400384] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:a6:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de52f276-c28b-45f5-8248-9019b9765828', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.411212] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e 
tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.411212] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.411212] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fea90448-32c4-450c-9d7f-5a3e9b7f4226 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.429768] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d0177eb-f0b7-4d90-a241-61291940d865 req-6d80e2c8-5993-4994-80e9-6890b5f517ab service nova] Releasing lock "refresh_cache-4066a18f-acc5-49b5-941c-0711f29bdcd2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.437376] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.437376] env[69475]: value = "task-3508496" [ 957.437376] env[69475]: _type = "Task" [ 957.437376] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.443530] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508495, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068758} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.444216] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 957.445320] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71d1c88-16c9-418d-b694-cd01fd9656f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.451133] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508496, 'name': CreateVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.471266] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 4066a18f-acc5-49b5-941c-0711f29bdcd2/4066a18f-acc5-49b5-941c-0711f29bdcd2.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 957.471581] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8734df6b-760e-413b-9994-71e06aa78b49 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.489374] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 957.489586] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing instance network info cache due to event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 957.489847] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.489990] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.490160] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 957.495853] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 957.495853] env[69475]: value = "task-3508497" [ 957.495853] env[69475]: _type = "Task" [ 957.495853] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.507051] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508497, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.783054] env[69475]: DEBUG oslo_vmware.api [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508494, 'name': PowerOnVM_Task, 'duration_secs': 0.680452} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.783414] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.784030] env[69475]: INFO nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Took 10.73 seconds to spawn the instance on the hypervisor. [ 957.784030] env[69475]: DEBUG nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.784509] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21f02e4-7e9d-414f-9996-80616d0c64e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.810076] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.681s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.810076] env[69475]: INFO nova.compute.manager [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Migrating [ 957.817035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.971s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 957.817143] env[69475]: DEBUG nova.objects.instance [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'resources' on Instance uuid e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.953562] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508496, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.006696] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508497, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.155155] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Successfully updated port: f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 958.193684] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updated VIF entry in instance network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 958.194071] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.304796] env[69475]: INFO nova.compute.manager [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Took 37.90 seconds to build instance. 
[ 958.321720] env[69475]: DEBUG nova.objects.instance [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'numa_topology' on Instance uuid e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.328330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.328471] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.328643] env[69475]: DEBUG nova.network.neutron [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.448074] env[69475]: DEBUG nova.compute.manager [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.448357] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508496, 'name': CreateVM_Task, 'duration_secs': 0.837512} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.449060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4132dffe-c4d6-4160-ba0c-f310daf96e3d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.452035] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.452709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.452875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.453188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 958.453767] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96ac8671-b53f-4224-b90f-c121f93f67d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.461469] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 958.461469] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644a2-d164-8965-b1b7-eeb08e271527" [ 958.461469] env[69475]: _type = "Task" [ 958.461469] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.472104] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644a2-d164-8965-b1b7-eeb08e271527, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.506532] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508497, 'name': ReconfigVM_Task, 'duration_secs': 0.863589} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.506798] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 4066a18f-acc5-49b5-941c-0711f29bdcd2/4066a18f-acc5-49b5-941c-0711f29bdcd2.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 958.507538] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-43664c11-ad6c-4121-9a84-7ba9e75221f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.513871] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 958.513871] env[69475]: value = "task-3508498" [ 958.513871] env[69475]: _type = "Task" [ 958.513871] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.521835] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508498, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.657100] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.660586] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.660586] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.699915] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Received event network-vif-deleted-409b371c-7a12-4772-a463-d5e8bc596b60 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.699915] env[69475]: DEBUG nova.compute.manager 
[req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Received event network-vif-plugged-de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquiring lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.699915] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] No waiting events found dispatching network-vif-plugged-de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 958.699915] env[69475]: WARNING nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Received unexpected event network-vif-plugged-de52f276-c28b-45f5-8248-9019b9765828 for instance with vm_state building and task_state spawning. [ 958.699915] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 958.699915] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing instance network info cache due to event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.699915] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.699915] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 958.807469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4e4d418c-eee4-471e-ba69-f0ab8b453ce5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.416s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.825154] env[69475]: DEBUG nova.objects.base [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 958.963402] env[69475]: INFO nova.compute.manager [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] instance snapshotting [ 958.963959] env[69475]: DEBUG nova.objects.instance [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 958.982146] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525644a2-d164-8965-b1b7-eeb08e271527, 'name': SearchDatastore_Task, 'duration_secs': 0.019622} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.982691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.983067] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.983344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.983531] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 958.983721] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.984513] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce4ff41d-1374-4b31-a542-a5ae60bdd15f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.998344] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.998535] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.999302] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96a8edb9-8982-43e2-905e-fd458de04d39 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.004599] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 959.004599] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df5d56-f95b-0577-db2a-fe14814f8259" [ 959.004599] env[69475]: _type = "Task" [ 959.004599] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.015217] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df5d56-f95b-0577-db2a-fe14814f8259, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.022577] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508498, 'name': Rename_Task, 'duration_secs': 0.274176} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.025019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 959.025435] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87eb0989-55b0-4f43-a5ee-abf3e3a47fa5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.033330] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 959.033330] env[69475]: value = "task-3508499" [ 959.033330] env[69475]: _type = "Task" [ 959.033330] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.042836] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508499, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.111574] env[69475]: DEBUG nova.network.neutron [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.194041] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 959.241714] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96a7173-a27b-4ed5-81d5-ba1b481b2ecd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.256299] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379973fd-2c65-4c44-9e3a-437d10ad8398 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.304247] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cba387-b268-4668-802d-ff1368b27db6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.313018] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aca2eb8-4d59-4db9-a0ce-e4c8671c2d24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.326804] env[69475]: DEBUG nova.compute.provider_tree [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 959.383990] env[69475]: DEBUG nova.network.neutron [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating instance_info_cache with network_info: [{"id": "f493e873-49de-4112-9562-cbb7d23892c8", "address": "fa:16:3e:95:c3:f6", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493e873-49", "ovs_interfaceid": "f493e873-49de-4112-9562-cbb7d23892c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.478710] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1dae22d-bbb9-4d1a-9346-0d350d162333 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.497678] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a2e3d599-9053-43f9-a32c-a3cef2f3d19b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.515033] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52df5d56-f95b-0577-db2a-fe14814f8259, 'name': SearchDatastore_Task, 'duration_secs': 0.030985} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.516379] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9c70643-5170-445e-9163-111737af4bbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.521047] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 959.521047] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201cc14-3a87-4963-e2c2-a2720130d740" [ 959.521047] env[69475]: _type = "Task" [ 959.521047] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.529331] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201cc14-3a87-4963-e2c2-a2720130d740, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.544837] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508499, 'name': PowerOnVM_Task} progress is 90%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.600781] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updated VIF entry in instance network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 959.601559] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.129", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.615188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.830099] env[69475]: DEBUG nova.scheduler.client.report [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.847409] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "211f895a-bba5-4f10-9296-0d461af49f98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.847678] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.886881] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 959.887222] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Instance network_info: |[{"id": "f493e873-49de-4112-9562-cbb7d23892c8", "address": "fa:16:3e:95:c3:f6", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493e873-49", "ovs_interfaceid": "f493e873-49de-4112-9562-cbb7d23892c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 959.887637] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:c3:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f493e873-49de-4112-9562-cbb7d23892c8', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 959.895135] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating folder: Project (67d27343d8c04fc9a2bed7a764f6cf82). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.896029] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e08d8636-6100-493b-9f34-33edd1eb76e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.907439] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created folder: Project (67d27343d8c04fc9a2bed7a764f6cf82) in parent group-v700823. [ 959.907639] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating folder: Instances. Parent ref: group-v701060. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 959.907876] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-75d08cde-c0a8-4b11-bac6-6f3ce079b0f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.917144] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created folder: Instances in parent group-v701060. [ 959.917392] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 959.917580] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 959.917783] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2cda4e75-674a-4d95-a859-36b477863419 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.937566] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 959.937566] env[69475]: value = "task-3508502" [ 959.937566] env[69475]: _type = "Task" [ 959.937566] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.946284] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508502, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.011495] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 960.012232] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-9cb0ce5f-1ba1-40be-bd34-ed4a063a6057 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.019209] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 960.019209] env[69475]: value = "task-3508503" [ 960.019209] env[69475]: _type = "Task" [ 960.019209] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.031050] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508503, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.035640] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5201cc14-3a87-4963-e2c2-a2720130d740, 'name': SearchDatastore_Task, 'duration_secs': 0.011604} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.038669] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.038937] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 960.039205] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1db02507-57e9-4237-ba8a-db9010375176 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.046821] env[69475]: DEBUG oslo_vmware.api [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508499, 'name': PowerOnVM_Task, 'duration_secs': 0.580829} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.048091] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 960.049098] env[69475]: INFO nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Took 8.14 seconds to spawn the instance on the hypervisor. [ 960.049098] env[69475]: DEBUG nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 960.049098] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 960.049098] env[69475]: value = "task-3508504" [ 960.049098] env[69475]: _type = "Task" [ 960.049098] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.049475] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be71c620-1d7a-43ed-8b9f-545fd2e1bc56 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.064983] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508504, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.082315] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Received event network-vif-plugged-f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.082315] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.082315] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Lock "41ddf915-343b-46e4-834e-11ab3899242f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.082315] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Lock "41ddf915-343b-46e4-834e-11ab3899242f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.082645] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] No waiting events found dispatching network-vif-plugged-f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 960.082980] env[69475]: WARNING nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Received unexpected event network-vif-plugged-f493e873-49de-4112-9562-cbb7d23892c8 for instance with vm_state building and task_state spawning. 
[ 960.083291] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Received event network-changed-f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.083632] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Refreshing instance network info cache due to event network-changed-f493e873-49de-4112-9562-cbb7d23892c8. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 960.083950] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Acquiring lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.084233] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Acquired lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.084513] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Refreshing network info cache for port f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.103612] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.105585] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Received event network-changed-de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.105585] env[69475]: DEBUG nova.compute.manager [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Refreshing instance network info cache due to event network-changed-de52f276-c28b-45f5-8248-9019b9765828. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 960.105585] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquiring lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.105585] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Acquired lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.105585] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Refreshing network info cache for port de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 960.340425] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.521s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.341157] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.799s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.341748] env[69475]: DEBUG nova.objects.instance [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lazy-loading 'resources' on Instance uuid 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.354615] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 960.376978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-eb5ab964-44c1-4189-9805-f3c80abb01ca" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.377304] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-eb5ab964-44c1-4189-9805-f3c80abb01ca" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.448175] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508502, 'name': CreateVM_Task, 'duration_secs': 0.327467} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.448372] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 960.449116] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.449288] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.449616] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 960.449886] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32329a85-625f-459e-ac98-86e253983abf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.455887] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 960.455887] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5256c666-af82-a43f-2ab0-84cb849d5f41" [ 960.455887] env[69475]: _type = "Task" [ 960.455887] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.466150] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5256c666-af82-a43f-2ab0-84cb849d5f41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.528844] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508503, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.562146] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508504, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.454946} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.562462] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.562704] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.562996] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b362b7ed-e2bc-4a73-9482-46223e2b238d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.572990] env[69475]: INFO nova.compute.manager [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Took 36.83 seconds to build instance. [ 960.575762] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 960.575762] env[69475]: value = "task-3508505" [ 960.575762] env[69475]: _type = "Task" [ 960.575762] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.586389] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508505, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.831922] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updated VIF entry in instance network info cache for port de52f276-c28b-45f5-8248-9019b9765828. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.831922] env[69475]: DEBUG nova.network.neutron [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updating instance_info_cache with network_info: [{"id": "de52f276-c28b-45f5-8248-9019b9765828", "address": "fa:16:3e:10:a6:34", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde52f276-c2", "ovs_interfaceid": "de52f276-c28b-45f5-8248-9019b9765828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.852375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e672fdb6-acca-4732-9c55-f3deec8a0dbf tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 43.868s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.852375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 18.330s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.852375] env[69475]: INFO nova.compute.manager [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Unshelving [ 960.876137] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.880368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.880708] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.881407] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cf2010-6da1-4316-b9ab-9e23d870dfbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.902590] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7c0167e-5fdb-47c2-9c1a-5747a321b161 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.935074] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfiguring VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 960.937985] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30333732-3519-4b11-a4a8-760baf31389c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.960755] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 960.960755] env[69475]: value = "task-3508506" [ 960.960755] env[69475]: _type = "Task" [ 960.960755] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.967588] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5256c666-af82-a43f-2ab0-84cb849d5f41, 'name': SearchDatastore_Task, 'duration_secs': 0.026631} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.970333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.970591] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 960.970844] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.970993] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.971245] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 960.971846] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbc0b35e-d6dd-4b8f-9191-b0bed468fbcb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.977117] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.986462] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 960.986594] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 960.987381] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-04ec0dcc-f8de-4d41-8b98-a7e3d94a8754 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.992707] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 960.992707] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a50c42-b27c-d23e-3b92-1b934c35b078" [ 960.992707] env[69475]: _type = "Task" [ 960.992707] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.000195] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a50c42-b27c-d23e-3b92-1b934c35b078, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.020728] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updated VIF entry in instance network info cache for port f493e873-49de-4112-9562-cbb7d23892c8. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 961.021129] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating instance_info_cache with network_info: [{"id": "f493e873-49de-4112-9562-cbb7d23892c8", "address": "fa:16:3e:95:c3:f6", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493e873-49", "ovs_interfaceid": "f493e873-49de-4112-9562-cbb7d23892c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.033239] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508503, 'name': CreateSnapshot_Task, 'duration_secs': 0.871083} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.034165] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 961.034945] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89debf8-32a5-4a1f-ae10-3aa10a89bd76 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.077346] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1139fed0-cd93-4780-8e2e-15508f7e019c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.362s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.090033] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508505, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083739} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.090363] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.091230] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b62d5c9-246d-459a-9506-5e4c52e75171 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.116441] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.119269] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd19a694-ba94-4104-897d-09177774a121 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.138478] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4e3c9f-bd29-450b-9907-8fc84d944e8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.158412] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 
b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 961.168520] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 961.168520] env[69475]: value = "task-3508507" [ 961.168520] env[69475]: _type = "Task" [ 961.168520] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.175417] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508507, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.340946] env[69475]: DEBUG oslo_concurrency.lockutils [req-c1bdf44b-60fa-45e3-94af-624f6f0e2d0e req-4fb27f53-c0dc-47b0-9aff-39dee9bc6adc service nova] Releasing lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.401749] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79daad34-49cb-42f2-a334-0251aafea35a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.409881] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee750af7-7dbf-4714-97eb-2ddf4abae93c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.440092] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46ae850e-9a59-42a4-be35-e7c961878829 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.447673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82296af-6f02-46c3-af42-5e1de195bfe6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.461287] env[69475]: DEBUG nova.compute.provider_tree [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.471075] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.502117] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a50c42-b27c-d23e-3b92-1b934c35b078, 'name': SearchDatastore_Task, 'duration_secs': 0.015819} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.502704] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51cd6892-53da-4e30-aaff-3c6142a16333 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.508102] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 961.508102] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52787f33-25a2-5c7b-a10a-2da1f42a7efc" [ 961.508102] env[69475]: _type = "Task" [ 961.508102] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.515103] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52787f33-25a2-5c7b-a10a-2da1f42a7efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.528688] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Releasing lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 961.528918] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.529094] env[69475]: DEBUG nova.compute.manager [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing instance network info cache due to event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 961.529340] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.529502] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.529671] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.554213] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 961.554778] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-02765466-1262-4081-964c-5596768bb563 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.564988] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 961.564988] env[69475]: value = "task-3508508" [ 961.564988] env[69475]: _type = "Task" [ 961.564988] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.573471] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508508, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.669501] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 961.669949] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ccedd69f-9654-4286-b7c0-17b1b87b6821 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.680660] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508507, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.682032] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 961.682032] env[69475]: value = "task-3508509" [ 961.682032] env[69475]: _type = "Task" [ 961.682032] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.690214] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508509, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.864019] env[69475]: DEBUG nova.compute.utils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 961.967684] env[69475]: DEBUG nova.scheduler.client.report [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.977036] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.018950] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52787f33-25a2-5c7b-a10a-2da1f42a7efc, 'name': SearchDatastore_Task, 'duration_secs': 0.009058} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.019252] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.019519] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 41ddf915-343b-46e4-834e-11ab3899242f/41ddf915-343b-46e4-834e-11ab3899242f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 962.019806] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-993f1e4c-4280-49ca-9af4-f2f96583d216 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.030241] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 962.030241] env[69475]: value = "task-3508510" [ 962.030241] env[69475]: _type = "Task" [ 962.030241] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.043393] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508510, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.075943] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508508, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.180443] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508507, 'name': ReconfigVM_Task, 'duration_secs': 0.733346} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.180789] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 962.181624] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69b7b7d4-44f4-402d-951f-b7b637f0965f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.193488] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508509, 'name': PowerOffVM_Task, 'duration_secs': 0.442344} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.194998] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 962.195274] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 962.199441] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 962.199441] env[69475]: value = "task-3508511" [ 962.199441] env[69475]: _type = "Task" [ 962.199441] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.211488] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508511, 'name': Rename_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.368758] env[69475]: INFO nova.virt.block_device [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Booting with volume d875e52a-1617-4b13-83ce-60084abbe663 at /dev/sdb [ 962.416513] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73b9646a-41e0-4def-8c70-ec5f5a4556ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.432212] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2122a4c3-1c8f-402a-8cb9-4d962a12f687 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.473332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.132s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.480190] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.086s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.481689] env[69475]: INFO nova.compute.claims [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.484819] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01524be5-b028-445e-b0a8-2f4239d8647d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.489361] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updated VIF entry in instance network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 962.489361] env[69475]: DEBUG nova.network.neutron [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.496599] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.501449] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d970fbc2-637c-4e13-aa1c-a5b91bfaaa0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.525144] env[69475]: INFO nova.scheduler.client.report [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted allocations for instance 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc [ 962.557314] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a47adb-ab23-4842-ae61-e24cbdfdd11b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.560908] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508510, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.567601] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bddb76d-aa74-4e8d-a24d-f41c58bf6533 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.582929] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508508, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.590212] env[69475]: DEBUG nova.virt.block_device [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating existing volume attachment record: 90422dbc-19da-4bc1-82e3-95541b090f2a {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 962.637595] env[69475]: DEBUG nova.compute.manager [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.637787] env[69475]: DEBUG nova.compute.manager [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing instance network info cache due to event network-changed-04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 962.637979] env[69475]: DEBUG oslo_concurrency.lockutils [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] Acquiring lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 962.702810] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 962.702938] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 962.703136] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 962.703365] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 962.703564] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 962.703754] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 962.704052] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 962.704240] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 962.704453] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 962.704649] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 962.704870] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 962.711086] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4108f2e0-758a-4a3a-af13-2a8d89cd8708 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.738980] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508511, 'name': Rename_Task, 'duration_secs': 0.184447} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.740867] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.740867] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 962.740867] env[69475]: value = "task-3508513" [ 962.740867] env[69475]: _type = "Task" [ 962.740867] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.740867] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e07ea3b2-3f23-4602-8f4b-548a52db894b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.752144] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508513, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.753523] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 962.753523] env[69475]: value = "task-3508514" [ 962.753523] env[69475]: _type = "Task" [ 962.753523] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.761650] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508514, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.983414] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.994326] env[69475]: DEBUG oslo_concurrency.lockutils [req-fef4cc50-773e-4e30-a8aa-29898061d5f8 req-491abc52-59fb-4b99-aa11-ecd95bc2497f service nova] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.994326] env[69475]: DEBUG oslo_concurrency.lockutils [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] Acquired lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.994461] env[69475]: DEBUG nova.network.neutron [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Refreshing network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 963.038706] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aaab4b61-e380-4f6f-9d0f-5d14ae6a4053 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.130s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.046608] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508510, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563157} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.046908] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 41ddf915-343b-46e4-834e-11ab3899242f/41ddf915-343b-46e4-834e-11ab3899242f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 963.047104] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 963.047405] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3e61e97-29d1-4b39-9a50-fc9ff7f8e853 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.055500] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 963.055500] env[69475]: value = "task-3508515" [ 963.055500] env[69475]: _type = "Task" [ 963.055500] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.064839] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508515, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.075931] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508508, 'name': CloneVM_Task, 'duration_secs': 1.370459} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.076535] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created linked-clone VM from snapshot [ 963.077521] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dc045af-b26e-4fed-8c20-3052bed9b362 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.088211] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploading image ef95d766-6c51-4d7b-a1cf-c86b9ab8af8d {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 963.110263] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 963.110263] env[69475]: value = "vm-701064" [ 963.110263] env[69475]: _type = "VirtualMachine" [ 963.110263] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 963.110446] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6f3d38ee-c7a3-423e-acc6-3c2b71ba4470 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.117621] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease: (returnval){ [ 963.117621] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528f3096-19ee-43bd-516b-ae02f9b3d70a" [ 963.117621] env[69475]: _type = "HttpNfcLease" [ 963.117621] env[69475]: } obtained for exporting VM: (result){ [ 963.117621] env[69475]: value = "vm-701064" [ 963.117621] env[69475]: _type = "VirtualMachine" [ 963.117621] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 963.117881] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the lease: (returnval){ [ 963.117881] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528f3096-19ee-43bd-516b-ae02f9b3d70a" [ 963.117881] env[69475]: _type = "HttpNfcLease" [ 963.117881] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 963.124590] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.124590] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528f3096-19ee-43bd-516b-ae02f9b3d70a" [ 963.124590] env[69475]: _type = "HttpNfcLease" [ 963.124590] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 963.252632] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508513, 'name': ReconfigVM_Task, 'duration_secs': 0.23512} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.252959] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 963.267850] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508514, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.448066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "02ba199b-a7dc-421c-a14a-b562da275377" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.448344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.448575] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "02ba199b-a7dc-421c-a14a-b562da275377-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.448759] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.448924] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.450986] env[69475]: INFO nova.compute.manager [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Terminating instance [ 963.483975] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.570979] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508515, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07456} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.572359] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 963.572780] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-119f47cb-ffa9-4ecd-aa4c-ec4236ace228 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.597211] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 41ddf915-343b-46e4-834e-11ab3899242f/41ddf915-343b-46e4-834e-11ab3899242f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 963.600404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-043dcca8-8a50-4197-a330-e346f267457a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.624910] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 963.624910] env[69475]: value = "task-3508519" [ 963.624910] env[69475]: _type = "Task" [ 963.624910] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.633417] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 963.633417] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528f3096-19ee-43bd-516b-ae02f9b3d70a" [ 963.633417] env[69475]: _type = "HttpNfcLease" [ 963.633417] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 963.633417] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 963.633417] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528f3096-19ee-43bd-516b-ae02f9b3d70a" [ 963.633417] env[69475]: _type = "HttpNfcLease" [ 963.633417] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 963.633417] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aa95c83-ab61-408e-ae37-a36016926c94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.640142] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508519, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.650010] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 963.650235] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk for reading. 
{{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 963.759464] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 963.759714] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 963.759866] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 963.760054] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 963.760238] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 963.760404] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 963.760609] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 963.760766] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 963.760949] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 
tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 963.761142] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 963.761329] env[69475]: DEBUG nova.virt.hardware [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 963.767272] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfiguring VM instance instance-0000004e to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 963.770370] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b59e6e4a-e350-44c1-a8f1-275e564faea5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.793864] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508514, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.796881] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 963.796881] env[69475]: value = "task-3508520" [ 963.796881] env[69475]: _type = "Task" [ 963.796881] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.805767] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508520, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.841111] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-408448f3-5bd7-4506-873d-d28390ee9de0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.849247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.851605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.851605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.851605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.851605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.856502] env[69475]: INFO nova.compute.manager [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Terminating instance [ 963.956228] env[69475]: DEBUG nova.compute.manager [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 963.956579] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 963.962536] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f198b027-7f5f-4083-afdb-ef028731ff8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.972706] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 963.973191] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57587439-2e2e-4b0d-bd90-e5e6e7f9d74d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.985478] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 963.985478] env[69475]: value = "task-3508521" [ 963.985478] env[69475]: _type = "Task" [ 963.985478] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.989302] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.002013] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508521, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.061105] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb21c77-1aba-495e-999b-871786fd2db8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.072454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3cd3b29-2704-441e-ad0d-b6e74a80b309 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.112472] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd486d9f-fd6f-4043-a231-d09cbb95fc4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.121893] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315901d9-ade7-4120-b620-debf01b15b6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.146391] env[69475]: DEBUG nova.compute.provider_tree [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 964.153875] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508519, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.155699] env[69475]: DEBUG nova.network.neutron [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updated VIF entry in instance network info cache for port 04c0ec8b-7341-4495-9aa9-5edcc8fd816a. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 964.156344] env[69475]: DEBUG nova.network.neutron [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [{"id": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "address": "fa:16:3e:16:48:72", "network": {"id": "b4898ad4-4e6f-4225-a619-45236df7e9ad", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-809945386-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4d8b5413bb2444538234a0c37633c89f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "073f8535-6b3a-4d21-a754-4c975554dcbf", "external-id": "nsx-vlan-transportzone-111", "segmentation_id": 111, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04c0ec8b-73", "ovs_interfaceid": "04c0ec8b-7341-4495-9aa9-5edcc8fd816a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.280162] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508514, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.310180] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508520, 'name': ReconfigVM_Task, 'duration_secs': 0.427842} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.310180] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfigured VM instance instance-0000004e to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 964.310432] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56ef108-ad06-45a6-ab1e-742995c09f6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.336626] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 964.336954] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-404eda1d-500b-4aeb-aa44-e3ce8450faa8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.356949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.356949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.364832] env[69475]: DEBUG nova.compute.manager [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.365155] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.365663] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 964.365663] env[69475]: value = "task-3508522" [ 964.365663] env[69475]: _type = "Task" [ 964.365663] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.366668] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498da5aa-d057-4fff-b03e-9276dfed663c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.386283] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508522, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.388461] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.388987] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d488113-845a-4cca-b0d6-983f520f30e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.397483] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 964.397483] env[69475]: value = "task-3508523" [ 964.397483] env[69475]: _type = "Task" [ 964.397483] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.408954] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508523, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.488085] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.500182] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508521, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.641948] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508519, 'name': ReconfigVM_Task, 'duration_secs': 0.621527} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.642450] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 41ddf915-343b-46e4-834e-11ab3899242f/41ddf915-343b-46e4-834e-11ab3899242f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.643278] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-765fcc5b-6bb9-4487-b6a4-1c7a8bafc208 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.649337] env[69475]: DEBUG nova.scheduler.client.report [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 964.654948] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 964.654948] env[69475]: value = "task-3508524" [ 964.654948] env[69475]: _type = "Task" [ 964.654948] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.659877] env[69475]: DEBUG oslo_concurrency.lockutils [req-f2118d9c-9e7a-4f04-b3ee-8f47933b1a12 req-e9063c3e-fa40-480d-91b7-acc3c115e868 service nova] Releasing lock "refresh_cache-02ba199b-a7dc-421c-a14a-b562da275377" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.667014] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508524, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.780816] env[69475]: DEBUG oslo_vmware.api [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508514, 'name': PowerOnVM_Task, 'duration_secs': 1.671883} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.781949] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.782190] env[69475]: INFO nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Took 10.29 seconds to spawn the instance on the hypervisor. [ 964.782434] env[69475]: DEBUG nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 964.783845] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbbd3b9-4eb8-4a93-b5d7-a76a152babff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.832024] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 964.832024] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 964.863342] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 964.894156] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508522, 'name': ReconfigVM_Task, 'duration_secs': 0.436077} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.894156] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Reconfigured VM instance instance-0000004e to attach disk [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59/b8c50d0a-4b3d-4b70-9bd6-8304fa128e59.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 964.894156] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 964.910111] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508523, 'name': PowerOffVM_Task, 'duration_secs': 0.271425} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.910408] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.910862] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.911255] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0331ac3-928b-44de-a189-abe2285e461b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.952072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.952806] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.992163] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Unregistered the VM 
{{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.992812] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.994113] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] eadfea6c-3fce-4f54-b889-d994d61ec14f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.994113] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.994113] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2302ccbc-8246-43be-876a-24210d8ffc99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.006376] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508521, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.008492] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 965.008492] env[69475]: value = "task-3508526" [ 965.008492] env[69475]: _type = "Task" [ 965.008492] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.021612] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508526, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.158208] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.678s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.159266] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 965.162398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 23.637s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.177415] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508524, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.310137] env[69475]: INFO nova.compute.manager [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Took 39.73 seconds to build instance. [ 965.337333] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.337573] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.337884] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.338175] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.338350] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.338520] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.338671] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 965.339101] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 965.388854] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.401560] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58060c4c-6bd1-4159-b456-722e5c6e35f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.424309] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8ab8cc-d66d-4c37-bdc6-98f8b0cca1d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.452487] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 965.458057] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 965.488629] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.505031] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508521, 'name': PowerOffVM_Task, 'duration_secs': 1.233339} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.506639] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.506639] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.506916] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-45de3c70-6402-4ee8-a995-882b6622a7ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.519455] env[69475]: DEBUG oslo_vmware.api [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508526, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.30655} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.519828] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.520042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.520345] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.520520] env[69475]: INFO nova.compute.manager [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 965.520775] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.521202] env[69475]: DEBUG nova.compute.manager [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.521344] env[69475]: DEBUG nova.network.neutron [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.588280] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.589452] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.589452] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Deleting the datastore file [datastore1] 02ba199b-a7dc-421c-a14a-b562da275377 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.589838] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-44db3095-e340-4b24-ae6a-1c862120c971 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.598046] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for the task: (returnval){ [ 965.598046] env[69475]: value = "task-3508529" [ 965.598046] env[69475]: _type = "Task" [ 965.598046] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.608321] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508529, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.673086] env[69475]: DEBUG nova.objects.instance [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lazy-loading 'migration_context' on Instance uuid b1b04eb9-ded6-4425-8a06-0c26c086a09b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.675460] env[69475]: DEBUG nova.compute.utils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.676828] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508524, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.677552] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 965.677552] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 965.724911] env[69475]: DEBUG nova.policy [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba09f56e4fda4fc99602796a0af6cb33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87670cfd2b848af98507a5ebf9fab51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 965.813565] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0d87e256-dd43-4b2f-9e85-0dfc7b384f5e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.249s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.842694] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.994100] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf 
tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.995192] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 966.033685] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Successfully created port: faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 966.105802] env[69475]: DEBUG nova.network.neutron [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Port afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 966.116496] env[69475]: DEBUG oslo_vmware.api [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Task: {'id': task-3508529, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251389} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.116910] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.117123] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.118109] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.118483] env[69475]: INFO nova.compute.manager [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Took 2.16 seconds to destroy the instance on the hypervisor. 
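The ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, Rename_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: invoke_api() issues the SOAP call (the "Invoking VirtualMachine...." lines) and wait_for_task() polls the returned Task object, producing the "progress is N%" and "completed successfully" messages. The following is a minimal sketch of that pattern, not code from this deployment; the host, credentials and vm_ref are placeholders, and the exact VMwareAPISession constructor arguments should be checked against the installed oslo.vmware release.

```python
# Minimal sketch of the invoke_api()/wait_for_task() pattern seen in the log.
# Host, credentials and vm_ref are placeholders, not values from this system.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vc.example.test',   # vCenter host (placeholder)
    'user',              # server_username (placeholder)
    'secret',            # server_password (placeholder)
    10,                  # api_retry_count
    0.5)                 # task_poll_interval: seconds between _poll_task calls

def power_off(vm_ref):
    # Issues "Invoking VirtualMachine.PowerOffVM_Task" and returns a Task ref...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ...which wait_for_task() polls until it completes or fails, emitting the
    # "progress is N%" / "completed successfully" debug lines along the way.
    return session.wait_for_task(task)
```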
[ 966.118610] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.118868] env[69475]: DEBUG nova.compute.manager [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.118971] env[69475]: DEBUG nova.network.neutron [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.174063] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508524, 'name': Rename_Task, 'duration_secs': 1.264617} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.174471] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 966.174808] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a02d323-1a1f-40ea-9ec5-30d9c1db5599 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.180012] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 966.188384] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 966.188384] env[69475]: value = "task-3508530" [ 966.188384] env[69475]: _type = "Task" [ 966.188384] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.199424] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508530, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.489634] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.557623] env[69475]: DEBUG nova.network.neutron [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.616196] env[69475]: DEBUG nova.compute.manager [req-f540e43d-5655-4550-a0da-a7aa8358c3d7 req-58133838-085e-4f97-a2d1-85af378db867 service nova] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Received event network-vif-deleted-1bb10344-b9bf-42e7-9ee2-2b246b9975fa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 966.623317] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f93be42-edd2-48ef-94ad-67942cf8e037 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.632221] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8900efd-c16e-45e6-83d5-8291ad4644c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.679266] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f41f69dd-5ee7-4f2d-b209-895ce018d800 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.687327] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec4ae52-6556-4413-8d18-0d15d2219c81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.711136] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508530, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.718644] env[69475]: DEBUG nova.compute.provider_tree [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 966.993372] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.060539] env[69475]: INFO nova.compute.manager [-] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Took 1.54 seconds to deallocate network for instance. 
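The "Waiting for function ..._deallocate_network_with_retries to return" entries come from an oslo.service looping call that keeps retrying network teardown until it succeeds, after which the "Took N seconds to deallocate network" line is logged. The sketch below shows only the generic looping-call idiom; Nova's actual helper in nova.compute.manager may use a different loop class and retry policy, and deallocate() here is a hypothetical stand-in for the Neutron call.

```python
# Generic oslo.service looping-call retry sketch (illustrative, not Nova's code).
from oslo_service import loopingcall

def deallocate():
    """Hypothetical network teardown that may raise on transient errors."""
    return True

attempts = {'n': 0}

def _deallocate_with_retries():
    attempts['n'] += 1
    try:
        result = deallocate()
    except Exception:
        if attempts['n'] >= 3:
            raise          # give up after a few tries
        return             # otherwise try again on the next tick
    # Raising LoopingCallDone stops the loop and hands back the result.
    raise loopingcall.LoopingCallDone(result)

timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
result = timer.start(interval=2, initial_delay=0).wait()
```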
[ 967.130786] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.131076] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.131258] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.203883] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 967.215276] env[69475]: DEBUG oslo_vmware.api [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508530, 'name': PowerOnVM_Task, 'duration_secs': 0.70314} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.215276] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 967.215457] env[69475]: INFO nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Took 10.05 seconds to spawn the instance on the hypervisor. 
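The "Acquiring lock ... / acquired ... waited 0.000s / released ... held 0.000s" triplet above is the standard oslo.concurrency pattern: the lock name is the instance UUID with an "-events" suffix, and the debug lines are emitted by lockutils itself around the locked callable. A small sketch follows, assuming a placeholder lock name rather than a real instance UUID.

```python
# Sketch of the oslo.concurrency locking behind the lockutils.py:405/410/424
# log lines above. Lock names are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('example-instance-uuid-events')
def _clear_events():
    # lockutils logs acquisition, wait time and hold time around this body.
    return {}

# Equivalent context-manager form for ad-hoc critical sections, e.g. the
# "compute_resources" lock used by the resource tracker:
with lockutils.lock('compute_resources'):
    pass
```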
[ 967.215641] env[69475]: DEBUG nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 967.216523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be29f5f8-a1b4-4f83-9f75-3a3f7d79d6cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.222152] env[69475]: DEBUG nova.scheduler.client.report [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 967.254159] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 967.254676] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 967.258019] env[69475]: DEBUG nova.virt.hardware [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 967.258019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f46d697-04c4-4431-bd58-673935b8a748 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.268364] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cedba254-5a8b-4947-a188-85f6b6dee2fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.298316] env[69475]: DEBUG nova.network.neutron [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.482798] env[69475]: DEBUG nova.compute.manager [req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Received event network-vif-plugged-faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.483232] env[69475]: DEBUG oslo_concurrency.lockutils 
[req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.483615] env[69475]: DEBUG oslo_concurrency.lockutils [req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 967.485783] env[69475]: DEBUG oslo_concurrency.lockutils [req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.485783] env[69475]: DEBUG nova.compute.manager [req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] No waiting events found dispatching network-vif-plugged-faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 967.485783] env[69475]: WARNING nova.compute.manager [req-3db94d19-53a2-4558-98bf-39515aca81c5 req-46beb321-96e5-47aa-8f33-e1f64656f26c service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Received unexpected event network-vif-plugged-faddd0cb-2c06-43e5-adff-b74e725a50ba for instance with vm_state building and task_state spawning. [ 967.494737] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.568365] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 967.594548] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Successfully updated port: faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 967.743838] env[69475]: INFO nova.compute.manager [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Took 38.56 seconds to build instance. [ 967.801539] env[69475]: INFO nova.compute.manager [-] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Took 1.68 seconds to deallocate network for instance. 
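The pop_instance_event locking and the WARNING about an unexpected network-vif-plugged event above reflect a waiter registry: the driver must register interest in a named event before it arrives, and the external-event handler pops the matching waiter; if nothing was registered (as here, while the instance is still building), the event is reported as unexpected. The following is a conceptual sketch only, not Nova's implementation; class and method names are illustrative.

```python
# Conceptual waiter-registry sketch for the "No waiting events found
# dispatching network-vif-plugged-..." behaviour. Names are illustrative.
import threading

class InstanceEvents:
    def __init__(self):
        self._waiters = {}                 # (instance_uuid, event) -> Event
        self._lock = threading.Lock()

    def prepare_for_event(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
popped = events.pop_instance_event('some-instance-uuid', 'network-vif-plugged')
if popped is None:
    print('Received unexpected event')     # the WARNING case in the log
else:
    popped.set()                           # unblocks the registered waiter
```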
[ 968.000586] env[69475]: DEBUG oslo_vmware.api [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508506, 'name': ReconfigVM_Task, 'duration_secs': 6.788065} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.000586] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.000586] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Reconfigured VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 968.097395] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.097395] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.097395] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.181745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.181964] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.182175] env[69475]: DEBUG nova.network.neutron [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 968.243745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 
tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.081s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.253177] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.306s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.253535] env[69475]: DEBUG nova.objects.instance [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lazy-loading 'resources' on Instance uuid a87da6e4-d7ec-4624-94bc-b76ade04d511 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 968.254731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9e59ae4-c61e-4668-860a-9fc0e24ff466 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.084s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.308421] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.652650] env[69475]: DEBUG nova.compute.manager [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Received event network-vif-deleted-04c0ec8b-7341-4495-9aa9-5edcc8fd816a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.652877] env[69475]: DEBUG nova.compute.manager [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Received event network-changed-de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 968.653577] env[69475]: DEBUG nova.compute.manager [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Refreshing instance network info cache due to event network-changed-de52f276-c28b-45f5-8248-9019b9765828. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 968.654300] env[69475]: DEBUG oslo_concurrency.lockutils [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] Acquiring lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 968.654520] env[69475]: DEBUG oslo_concurrency.lockutils [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] Acquired lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 968.654776] env[69475]: DEBUG nova.network.neutron [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Refreshing network info cache for port de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 968.657203] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 968.731722] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.836105] env[69475]: DEBUG nova.network.neutron [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Updating instance_info_cache with network_info: [{"id": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "address": "fa:16:3e:c0:81:5c", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaddd0cb-2c", "ovs_interfaceid": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.167178] env[69475]: DEBUG nova.network.neutron [None 
req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.218458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e139ce-3f12-4dcb-94d4-c63a154d9e8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.230397] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f517c0-ebcc-48af-8bc6-f154ff6906ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.266046] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edba2036-0556-496d-befb-edec1b6ea078 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.274731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.275030] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.275254] env[69475]: DEBUG nova.network.neutron [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.284246] env[69475]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9432238-9e92-4642-bce0-487612ad9457 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.300435] env[69475]: DEBUG nova.compute.provider_tree [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 969.338536] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.338879] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance network_info: |[{"id": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "address": "fa:16:3e:c0:81:5c", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaddd0cb-2c", "ovs_interfaceid": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 969.339361] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:81:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'faddd0cb-2c06-43e5-adff-b74e725a50ba', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 969.348835] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.349928] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 969.350182] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-423280a7-a0b8-4363-90bf-0936e06d5221 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.375465] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 969.375465] env[69475]: value = "task-3508531" [ 969.375465] env[69475]: _type = "Task" [ 969.375465] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.383616] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508531, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.445234] env[69475]: DEBUG nova.network.neutron [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updated VIF entry in instance network info cache for port de52f276-c28b-45f5-8248-9019b9765828. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 969.446125] env[69475]: DEBUG nova.network.neutron [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updating instance_info_cache with network_info: [{"id": "de52f276-c28b-45f5-8248-9019b9765828", "address": "fa:16:3e:10:a6:34", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.200", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde52f276-c2", "ovs_interfaceid": "de52f276-c28b-45f5-8248-9019b9765828", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 969.644697] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.645043] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.645298] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.645552] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.645768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.649426] env[69475]: INFO nova.compute.manager [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Terminating instance [ 969.669970] env[69475]: DEBUG oslo_concurrency.lockutils [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.792385] env[69475]: DEBUG nova.compute.manager [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Received event network-changed-faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 969.793063] env[69475]: DEBUG nova.compute.manager [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Refreshing instance network info cache due to event network-changed-faddd0cb-2c06-43e5-adff-b74e725a50ba. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 969.793063] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Acquiring lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.793244] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Acquired lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.793832] env[69475]: DEBUG nova.network.neutron [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Refreshing network info cache for port faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 969.801028] env[69475]: INFO nova.compute.manager [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Swapping old allocation on dict_keys(['dd221100-68c1-4a75-92b5-b24d81fee5da']) held by migration 5f455cf6-0177-44bd-8ddf-bf490d77efd6 for instance [ 969.803819] env[69475]: DEBUG nova.scheduler.client.report [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.828491] env[69475]: DEBUG nova.scheduler.client.report [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Overwriting current allocation {'allocations': {'dd221100-68c1-4a75-92b5-b24d81fee5da': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 122}}, 'project_id': 'dde7ecd407ae48f6a5d1b791df065d6e', 'user_id': '2cadefdf967f4ef1b0c24f7bb0b7d6d3', 'consumer_generation': 1} on consumer b1b04eb9-ded6-4425-8a06-0c26c086a09b {{(pid=69475) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 969.886179] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508531, 'name': CreateVM_Task, 'duration_secs': 0.417135} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.889502] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.890568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.891355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.891355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.891942] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4568c9a-e705-48de-b82a-bbf632ceda62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.897710] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 969.897710] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c828fe-a743-3802-684d-97fb80af946c" [ 969.897710] env[69475]: _type = "Task" [ 969.897710] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.911155] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c828fe-a743-3802-684d-97fb80af946c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.946207] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.946402] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquired lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.946583] env[69475]: DEBUG nova.network.neutron [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 969.952020] env[69475]: DEBUG oslo_concurrency.lockutils [req-9213ba50-3097-433f-a64a-6d0c3ba6a700 req-aaa5866e-a4e3-4c25-ac0b-d7a115063497 service nova] Releasing lock "refresh_cache-ff09407e-93ea-4919-ba5f-b7ee6dd018a4" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.039452] env[69475]: INFO nova.network.neutron [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Port eb5ab964-44c1-4189-9805-f3c80abb01ca from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 970.039452] env[69475]: DEBUG nova.network.neutron [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.154111] env[69475]: DEBUG nova.compute.manager [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 970.155232] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 970.155926] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951679ab-6d38-4b4d-bfe8-d75d0907112f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.163876] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 970.164238] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ad113ac-4e2a-4255-82af-a6e686d23fac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.170884] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 970.170884] env[69475]: value = "task-3508532" [ 970.170884] env[69475]: _type = "Task" [ 970.170884] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.181766] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508532, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.196051] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68228f3a-090d-4776-95ef-6d854c66d6c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.217929] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e351902-01d4-4e49-ae23-b2dbfe95b397 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.226486] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 970.308936] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.056s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.311433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.589s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.311818] env[69475]: DEBUG nova.objects.instance [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lazy-loading 'resources' on Instance uuid 235653ac-a893-4f42-a394-dd81f61f0d73 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.329321] env[69475]: INFO nova.scheduler.client.report [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Deleted allocations for instance a87da6e4-d7ec-4624-94bc-b76ade04d511 [ 970.409827] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c828fe-a743-3802-684d-97fb80af946c, 'name': SearchDatastore_Task, 'duration_secs': 0.01064} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.412467] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.412704] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 970.412941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.413167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.413391] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 970.413659] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e365502b-674c-4987-a711-0cc2a76adae2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.422697] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 970.423096] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 970.423891] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20446cb9-ea3f-4be5-87a9-9cf4d3726242 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.429726] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 970.429726] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52403c58-77bc-70ea-ae4d-afbcc8cc6dc2" [ 970.429726] env[69475]: _type = "Task" [ 970.429726] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.438290] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52403c58-77bc-70ea-ae4d-afbcc8cc6dc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.517260] env[69475]: DEBUG nova.network.neutron [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Updated VIF entry in instance network info cache for port faddd0cb-2c06-43e5-adff-b74e725a50ba. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 970.517689] env[69475]: DEBUG nova.network.neutron [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Updating instance_info_cache with network_info: [{"id": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "address": "fa:16:3e:c0:81:5c", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfaddd0cb-2c", "ovs_interfaceid": "faddd0cb-2c06-43e5-adff-b74e725a50ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.543232] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.690023] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508532, 'name': PowerOffVM_Task, 'duration_secs': 0.195376} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.690023] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 970.690023] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 970.690023] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4b20d4e-abe1-4790-b20c-7b9cfdac61a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.700174] env[69475]: DEBUG nova.compute.manager [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Received event network-changed-f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.700380] env[69475]: DEBUG nova.compute.manager [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Refreshing instance network info cache due to event network-changed-f493e873-49de-4112-9562-cbb7d23892c8. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 970.700624] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] Acquiring lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 970.700726] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] Acquired lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 970.700886] env[69475]: DEBUG nova.network.neutron [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Refreshing network info cache for port f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 970.734079] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.735208] env[69475]: DEBUG nova.network.neutron [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [{"id": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "address": "fa:16:3e:83:ce:8a", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.106", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a10762-ba", "ovs_interfaceid": "f9a10762-ba87-425f-9623-1ffdf22c5bb4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.735554] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f919d63-7eca-41f8-8ff3-960e5068479f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.743594] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 970.743594] env[69475]: value = "task-3508534" [ 970.743594] 
env[69475]: _type = "Task" [ 970.743594] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.756130] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508534, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.757616] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 970.757805] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 970.757983] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleting the datastore file [datastore2] f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 970.762665] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-719e431f-dad4-437c-a168-c9cbf94ae1c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.770929] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 970.770929] env[69475]: value = "task-3508535" [ 970.770929] env[69475]: _type = "Task" [ 970.770929] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.779747] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508535, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.839906] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0e9c3ebd-0e72-4534-a93d-a2caf4a8d99a tempest-ServersWithSpecificFlavorTestJSON-349924692 tempest-ServersWithSpecificFlavorTestJSON-349924692-project-member] Lock "a87da6e4-d7ec-4624-94bc-b76ade04d511" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.122s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.944777] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52403c58-77bc-70ea-ae4d-afbcc8cc6dc2, 'name': SearchDatastore_Task, 'duration_secs': 0.0088} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.949172] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcbf08f6-2f42-4784-b6ec-8676891c41de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.955645] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 970.955645] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52302c91-409d-72b0-f675-7d55a7f145bd" [ 970.955645] env[69475]: _type = "Task" [ 970.955645] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.964801] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52302c91-409d-72b0-f675-7d55a7f145bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.022155] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Releasing lock "refresh_cache-dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.022449] env[69475]: DEBUG nova.compute.manager [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-vif-deleted-eb5ab964-44c1-4189-9805-f3c80abb01ca {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 971.022660] env[69475]: INFO nova.compute.manager [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Neutron deleted interface eb5ab964-44c1-4189-9805-f3c80abb01ca; detaching it from the instance and deleting it from the info cache [ 971.023142] env[69475]: DEBUG nova.network.neutron [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [{"id": "30f10284-138a-4774-b024-33ffa906ef81", "address": "fa:16:3e:49:ea:cb", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30f10284-13", "ovs_interfaceid": "30f10284-138a-4774-b024-33ffa906ef81", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 971.047158] env[69475]: DEBUG oslo_concurrency.lockutils [None req-28b51d38-faf0-4f55-9c6f-b51710cec9cf tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0-eb5ab964-44c1-4189-9805-f3c80abb01ca" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.670s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.240675] env[69475]: DEBUG oslo_concurrency.lockutils [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Releasing lock "refresh_cache-b1b04eb9-ded6-4425-8a06-0c26c086a09b" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.241889] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.243125] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c76402fc-3d2c-44c1-ba3a-6e1835523175 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.249682] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129b0317-904a-4664-9375-e98a537e0b85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.958196] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Acquiring lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 971.968207] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 971.968207] env[69475]: value = "task-3508536" [ 971.968207] env[69475]: _type = "Task" [ 971.968207] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.972264] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabaf90f-d29f-4cff-b5df-53ab1eb1b62f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.975372] env[69475]: DEBUG oslo_vmware.api [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508534, 'name': PowerOnVM_Task, 'duration_secs': 0.515295} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.984332] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.984555] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-55678141-30ba-47dd-b895-c297c14cf6a3 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance 'b8c50d0a-4b3d-4b70-9bd6-8304fa128e59' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 971.988252] env[69475]: DEBUG oslo_vmware.api [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178646} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.988453] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52302c91-409d-72b0-f675-7d55a7f145bd, 'name': SearchDatastore_Task, 'duration_secs': 0.008897} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.989763] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 971.989860] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 971.990010] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 971.990277] env[69475]: INFO nova.compute.manager [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Took 1.84 seconds to destroy the instance on the hypervisor. [ 971.990414] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 971.990617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 971.990854] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 971.991513] env[69475]: DEBUG nova.compute.manager [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 971.991617] env[69475]: DEBUG nova.network.neutron [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 971.993303] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7f1bc92-f1a8-4a80-bffd-f8cfe450b08f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.023681] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508536, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.029374] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bd3ff5-ddc8-4a54-bc86-918e3b04fd47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.038024] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 972.038024] env[69475]: value = "task-3508537" [ 972.038024] env[69475]: _type = "Task" [ 972.038024] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.046389] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbd5bfa-7a1d-4b85-a1d7-3e376c20b46a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.057653] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508537, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.080497] env[69475]: DEBUG nova.compute.provider_tree [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.091392] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 972.092491] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483a4325-6f9e-4c83-8d4c-c346558a286f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.099717] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 972.099717] env[69475]: ERROR oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk due to incomplete transfer. [ 972.101346] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-035674fa-fbf6-4c43-9a63-488f4467c4fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.107879] env[69475]: DEBUG oslo_vmware.rw_handles [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526754c4-0cde-fc9b-88da-63288a38ec9c/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 972.108091] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploaded image ef95d766-6c51-4d7b-a1cf-c86b9ab8af8d to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 972.111595] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 972.111595] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-aa306489-e4e4-4138-be1d-d0aebb72ffb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.118027] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 972.118027] env[69475]: value = "task-3508538" [ 972.118027] env[69475]: _type = "Task" [ 972.118027] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.126063] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508538, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.489604] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508536, 'name': PowerOffVM_Task, 'duration_secs': 0.339834} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.492420] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.493250] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:39:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2d6dab00-d5b5-4904-b4d4-4a46ef0cf4b2',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2018768159',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 972.494511] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 972.494511] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 972.494511] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 972.494511] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 972.494748] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 972.494990] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 972.495198] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 
tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 972.495446] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 972.495690] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 972.495913] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 972.502922] env[69475]: DEBUG nova.network.neutron [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updated VIF entry in instance network info cache for port f493e873-49de-4112-9562-cbb7d23892c8. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 972.503506] env[69475]: DEBUG nova.network.neutron [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating instance_info_cache with network_info: [{"id": "f493e873-49de-4112-9562-cbb7d23892c8", "address": "fa:16:3e:95:c3:f6", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.230", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf493e873-49", "ovs_interfaceid": "f493e873-49de-4112-9562-cbb7d23892c8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 972.504776] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09b80cbe-a99f-4990-be50-9bc6c23307c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.525806] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 
tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 972.525806] env[69475]: value = "task-3508539" [ 972.525806] env[69475]: _type = "Task" [ 972.525806] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.537695] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.549316] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508537, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.585733] env[69475]: DEBUG nova.scheduler.client.report [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 972.629443] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508538, 'name': Destroy_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.835610] env[69475]: DEBUG nova.compute.manager [req-87c0363d-af73-4bd1-aeca-7a4c7f350a58 req-0795217b-194b-42ee-b54c-3aedd025a9d6 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Received event network-vif-deleted-30f10284-138a-4774-b024-33ffa906ef81 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 972.835823] env[69475]: INFO nova.compute.manager [req-87c0363d-af73-4bd1-aeca-7a4c7f350a58 req-0795217b-194b-42ee-b54c-3aedd025a9d6 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Neutron deleted interface 30f10284-138a-4774-b024-33ffa906ef81; detaching it from the instance and deleting it from the info cache [ 972.836165] env[69475]: DEBUG nova.network.neutron [req-87c0363d-af73-4bd1-aeca-7a4c7f350a58 req-0795217b-194b-42ee-b54c-3aedd025a9d6 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.018689] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a62acc9-e493-4b0b-a5a8-d915d65b2f39 req-4b209326-f143-45c7-84ea-d2d603a45ddb service nova] Releasing lock "refresh_cache-41ddf915-343b-46e4-834e-11ab3899242f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 973.019561] env[69475]: DEBUG nova.network.neutron [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.038864] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508539, 'name': ReconfigVM_Task, 'duration_secs': 0.439964} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.044882] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4129459a-a07f-427f-8b7d-6b6b67c0d89c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.051202] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508537, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.066313] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:39:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='2d6dab00-d5b5-4904-b4d4-4a46ef0cf4b2',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-2018768159',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 973.066616] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 973.066855] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 973.067063] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 973.067252] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 973.067454] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 973.067626] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 973.067817] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 973.068104] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Got 1 possible 
topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 973.068284] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 973.068619] env[69475]: DEBUG nova.virt.hardware [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 973.069467] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84b50c24-0a42-4971-8a64-253145480b6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.075855] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 973.075855] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52025e97-1ca4-64ad-4a44-bc6341451dde" [ 973.075855] env[69475]: _type = "Task" [ 973.075855] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.087303] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52025e97-1ca4-64ad-4a44-bc6341451dde, 'name': SearchDatastore_Task, 'duration_secs': 0.007312} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.096031] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfiguring VM instance instance-00000046 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 973.096204] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.785s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.098403] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6cdbffd-10f0-4ace-bd33-3d326edf5930 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.114102] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.672s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 973.116950] env[69475]: DEBUG nova.objects.instance [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lazy-loading 'resources' on Instance uuid 78b5496c-f8e2-4681-a36b-50897b0f7325 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.126645] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 973.126645] env[69475]: value = "task-3508540" [ 973.126645] env[69475]: _type = "Task" [ 973.126645] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.136992] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508538, 'name': Destroy_Task, 'duration_secs': 0.516965} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.136992] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroyed the VM [ 973.136992] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 973.136992] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-5eeb3284-3c9b-4576-87c3-027f37fc914d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.144886] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508540, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.146863] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 973.146863] env[69475]: value = "task-3508541" [ 973.146863] env[69475]: _type = "Task" [ 973.146863] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.155273] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508541, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.159021] env[69475]: INFO nova.scheduler.client.report [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Deleted allocations for instance 235653ac-a893-4f42-a394-dd81f61f0d73 [ 973.338864] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-691e789b-4517-436a-97ec-69daf99f58b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.351076] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360e27cd-3f80-4918-bbcf-4a74f7c3270c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.391228] env[69475]: DEBUG nova.compute.manager [req-87c0363d-af73-4bd1-aeca-7a4c7f350a58 req-0795217b-194b-42ee-b54c-3aedd025a9d6 service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Detach interface failed, port_id=30f10284-138a-4774-b024-33ffa906ef81, reason: Instance f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 973.523176] env[69475]: INFO nova.compute.manager [-] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Took 1.53 seconds to deallocate network for instance. [ 973.550059] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508537, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.637998] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508540, 'name': ReconfigVM_Task, 'duration_secs': 0.386895} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.638506] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfigured VM instance instance-00000046 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 973.639625] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc8cdb4-0bc1-4abf-b5e4-985e059d2063 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.668030] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 973.674019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03832b31-51a4-41d5-bb04-3efdceae8b9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.692423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-05182987-38b8-4916-91b6-aea967705f38 tempest-ServersAdminTestJSON-573324530 tempest-ServersAdminTestJSON-573324530-project-member] Lock "235653ac-a893-4f42-a394-dd81f61f0d73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.598s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.697979] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508541, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.699980] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 973.699980] env[69475]: value = "task-3508542" [ 973.699980] env[69475]: _type = "Task" [ 973.699980] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 973.711218] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508542, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.029122] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.051251] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508537, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.533046} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.054076] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 974.054378] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 974.054883] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8e170ee-6105-4e49-a214-2b1cc744805f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.061766] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 974.061766] env[69475]: value = "task-3508543" [ 974.061766] env[69475]: _type = "Task" [ 974.061766] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.072613] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.093873] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b85003-cbd4-4be2-b5ea-5dae4e823ed2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.103283] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf07f18-950f-4ef3-8828-dee3c26d3035 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.142310] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56518b70-49c4-4d83-84dd-608a58994181 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.151082] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e40c33b0-4068-48d4-93fc-039cc98c9174 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.169476] env[69475]: DEBUG nova.compute.provider_tree [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.175470] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508541, 'name': RemoveSnapshot_Task} progress is 65%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.209350] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508542, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.572587] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073143} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.572587] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 974.573121] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb89f49-d9cf-4ec0-83b5-6515c4f330e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.596751] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 974.597260] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0432960-f3b6-4868-9960-f00339e65627 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.613619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.613950] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.614201] env[69475]: DEBUG nova.compute.manager [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Going to confirm migration 4 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 974.622791] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 974.622791] env[69475]: value = "task-3508544" [ 974.622791] env[69475]: _type = "Task" [ 974.622791] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.633612] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508544, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 974.666972] env[69475]: DEBUG oslo_vmware.api [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508541, 'name': RemoveSnapshot_Task, 'duration_secs': 1.455334} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.666972] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 974.666972] env[69475]: INFO nova.compute.manager [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 15.19 seconds to snapshot the instance on the hypervisor. [ 974.672739] env[69475]: DEBUG nova.scheduler.client.report [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 974.711397] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508542, 'name': ReconfigVM_Task, 'duration_secs': 0.945376} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.711796] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Reconfigured VM instance instance-00000046 to attach disk [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b/b1b04eb9-ded6-4425-8a06-0c26c086a09b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 974.713556] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2981a5-b3ba-4e94-bce2-700ef2b6642b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.734190] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc7c3eb-5461-4692-9f3d-8efa5df3d391 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.759313] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8662a6-35cf-4e36-b913-c46985e61288 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.783931] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f34dd10-5575-4b8d-86f7-42ed54c1ed02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.791603] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 974.791933] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3788cfe7-38c0-445f-81d3-2b6d8c58c067 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.798776] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 974.798776] env[69475]: value = "task-3508545" [ 974.798776] env[69475]: _type = "Task" [ 974.798776] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 974.806508] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508545, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.135892] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508544, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.183637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.066s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.183637] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.609s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.183637] env[69475]: DEBUG nova.objects.instance [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lazy-loading 'resources' on Instance uuid 4b17d080-594b-44e7-83aa-ebe0787722d9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.215208] env[69475]: INFO nova.scheduler.client.report [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Deleted allocations for instance 78b5496c-f8e2-4681-a36b-50897b0f7325 [ 975.229011] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.229278] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.229457] env[69475]: DEBUG nova.network.neutron [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 975.229678] env[69475]: DEBUG nova.objects.instance [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'info_cache' on Instance uuid b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 975.238979] env[69475]: DEBUG nova.compute.manager [None req-48f8d157-728f-4db9-ac07-6400fdcb9a6e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Found 2 images (rotation: 2) {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 975.313244] env[69475]: DEBUG oslo_vmware.api [None 
req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508545, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.637553] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508544, 'name': ReconfigVM_Task, 'duration_secs': 0.524421} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.637932] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Reconfigured VM instance instance-00000057 to attach disk [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 975.639031] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b197e3d-04ba-444c-8049-71c4c460f0ce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.646536] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 975.646536] env[69475]: value = "task-3508546" [ 975.646536] env[69475]: _type = "Task" [ 975.646536] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 975.656954] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508546, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 975.740777] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a134064a-45b4-476a-8348-75a3aad2cd1e tempest-ServerMetadataNegativeTestJSON-1827145523 tempest-ServerMetadataNegativeTestJSON-1827145523-project-member] Lock "78b5496c-f8e2-4681-a36b-50897b0f7325" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.902s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.815462] env[69475]: DEBUG oslo_vmware.api [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508545, 'name': PowerOnVM_Task, 'duration_secs': 0.533673} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 975.816802] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 976.073351] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0e9a7c-6bef-4ddd-81e4-9465514cb7a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.082950] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a344d596-50bf-4dfe-809b-a994edb28504 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.115613] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6424b98f-f9dd-4e7d-bdd9-f9edb1a9e77d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.123510] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fea9f79-856a-4084-abe1-613ffbf63709 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.137373] env[69475]: DEBUG nova.compute.provider_tree [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.158091] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508546, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.324825] env[69475]: DEBUG nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 976.330040] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474e7b47-1287-4901-a0e1-e4d1ebaeb556 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.643273] env[69475]: DEBUG nova.scheduler.client.report [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.662410] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508546, 'name': Rename_Task, 'duration_secs': 0.788683} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.664369] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 976.664369] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f53e6534-1d18-4c5e-ad52-05d415806d88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.672035] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 976.672035] env[69475]: value = "task-3508547" [ 976.672035] env[69475]: _type = "Task" [ 976.672035] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.683594] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508547, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.719277] env[69475]: DEBUG nova.network.neutron [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [{"id": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "address": "fa:16:3e:29:af:35", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapafb4cf7c-0e", "ovs_interfaceid": "afb4cf7c-0e25-4b9a-8f0d-90f08fecda68", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.837149] env[69475]: INFO nova.compute.manager [None req-acece7ec-d678-4f9d-9b56-499ee51f0838 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance to original state: 'active' [ 976.841509] env[69475]: INFO nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] instance snapshotting [ 976.846973] env[69475]: DEBUG nova.objects.instance [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.148730] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.966s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.156619] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.404s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 977.156619] env[69475]: DEBUG nova.objects.instance [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 
tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'resources' on Instance uuid 78430e6a-b0a3-400b-91c4-effea838274a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.192018] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508547, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.192018] env[69475]: INFO nova.scheduler.client.report [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted allocations for instance 4b17d080-594b-44e7-83aa-ebe0787722d9 [ 977.223423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.223686] env[69475]: DEBUG nova.objects.instance [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'migration_context' on Instance uuid b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.358236] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b5ca6eb-8af0-4068-84b0-0ee97c553856 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.397475] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ffa3fa8-072c-4ef5-80a6-5705cceac4a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.657140] env[69475]: DEBUG nova.objects.instance [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'numa_topology' on Instance uuid 78430e6a-b0a3-400b-91c4-effea838274a {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 977.684122] env[69475]: DEBUG oslo_vmware.api [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508547, 'name': PowerOnVM_Task, 'duration_secs': 0.892369} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.684446] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.684648] env[69475]: INFO nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Took 10.48 seconds to spawn the instance on the hypervisor. [ 977.684817] env[69475]: DEBUG nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 977.685637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b96437c-d9bf-4e6c-a9cd-74854e70cfeb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.705064] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d85ba202-2995-41fd-bb28-ec2ccd9ec31a tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "4b17d080-594b-44e7-83aa-ebe0787722d9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.860s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.728948] env[69475]: DEBUG nova.objects.base [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 977.730308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96f9294-b2d4-4ea7-a94b-54a7dae88010 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.755264] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0114092-7e33-43d7-97dd-cba9e14b9397 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.762609] env[69475]: DEBUG oslo_vmware.api [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 977.762609] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270e444-2cf5-d471-9b3d-28305e2e8518" [ 977.762609] env[69475]: _type = "Task" [ 977.762609] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.779042] env[69475]: DEBUG oslo_vmware.api [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270e444-2cf5-d471-9b3d-28305e2e8518, 'name': SearchDatastore_Task, 'duration_secs': 0.013679} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.779358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 977.912467] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 977.913280] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-657604a6-2061-4516-9186-102efbe28b69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.922951] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 977.922951] env[69475]: value = "task-3508548" [ 977.922951] env[69475]: _type = "Task" [ 977.922951] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.933733] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508548, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.160748] env[69475]: DEBUG nova.objects.base [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Object Instance<78430e6a-b0a3-400b-91c4-effea838274a> lazy-loaded attributes: resources,numa_topology {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 978.216947] env[69475]: INFO nova.compute.manager [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Took 36.84 seconds to build instance. [ 978.438390] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508548, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.628123] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a0ba9d7-bb38-407e-bf4e-b2c3ef7629e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.637330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74eafb2e-e366-49d5-ad04-d42001659dc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.679883] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e23f800c-4d12-41a6-bdbc-16dd4d6970b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.688639] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8803bc71-9cef-42a3-978a-d800ece55ea4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.705443] env[69475]: DEBUG nova.compute.provider_tree [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.719301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b60b526d-09d8-4942-91fd-40a0d1156846 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.354s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.765243] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.765522] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.765731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.765912] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 
tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.766099] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.768701] env[69475]: INFO nova.compute.manager [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Terminating instance [ 978.934731] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508548, 'name': CreateSnapshot_Task, 'duration_secs': 0.70394} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.934731] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 978.934933] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa7d032-f785-4091-8ed1-1f3f808ab937 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.979236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.979236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.979236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.979236] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.979236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.980550] env[69475]: INFO nova.compute.manager [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Terminating instance [ 979.209441] env[69475]: DEBUG nova.scheduler.client.report [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 979.273892] env[69475]: DEBUG nova.compute.manager [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.274187] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.275117] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34887470-78ad-4bf5-9e6c-3b6df7a98352 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.284100] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.284199] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63110ee7-881a-4bfc-b8b2-a85a24eba304 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.291408] env[69475]: DEBUG oslo_vmware.api [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 979.291408] env[69475]: value = "task-3508549" [ 979.291408] env[69475]: _type = "Task" [ 979.291408] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.454376] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 979.454844] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-74bd66c2-1cce-46cf-a376-7261775ca8bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.466668] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 979.466668] env[69475]: value = "task-3508550" [ 979.466668] env[69475]: _type = "Task" [ 979.466668] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.479878] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508550, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.484792] env[69475]: DEBUG nova.compute.manager [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 979.485077] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 979.485976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc8c3a3-1f03-4adc-8f24-605454b09473 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.494352] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 979.494603] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e967c04f-53ef-4757-993d-031dce661b24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.501797] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 979.501797] env[69475]: value = "task-3508551" [ 979.501797] env[69475]: _type = "Task" [ 979.501797] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.511835] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508551, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.554799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 979.555294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.555712] env[69475]: DEBUG nova.compute.manager [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.557552] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a6a74f-440c-49ba-9531-92435f9aada1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.567181] env[69475]: DEBUG nova.compute.manager [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 979.567881] env[69475]: DEBUG nova.objects.instance [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'flavor' on Instance uuid dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.715089] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.562s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.719198] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.288s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 979.719616] env[69475]: DEBUG nova.objects.instance [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lazy-loading 'resources' on Instance uuid 86647493-8b2c-46bd-94d3-c973e843f778 
{{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.803480] env[69475]: DEBUG oslo_vmware.api [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508549, 'name': PowerOffVM_Task, 'duration_secs': 0.317209} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.804217] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 979.804217] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 979.804217] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5ee916e1-38df-4dd5-93ce-e908abb69cde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.895724] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 979.897045] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 979.897045] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleting the datastore file [datastore1] b1b04eb9-ded6-4425-8a06-0c26c086a09b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 979.897045] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0791eec4-6cc3-4f7d-858f-0ca8d30cdc9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.904236] env[69475]: DEBUG oslo_vmware.api [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 979.904236] env[69475]: value = "task-3508553" [ 979.904236] env[69475]: _type = "Task" [ 979.904236] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.913714] env[69475]: DEBUG oslo_vmware.api [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508553, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.977513] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508550, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.013545] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508551, 'name': PowerOffVM_Task, 'duration_secs': 0.228137} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.014279] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 980.014279] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 980.014746] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1ba6f920-58c1-491a-a34b-8a1d59e0992c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.098088] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 980.098088] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 980.098088] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleting the datastore file [datastore1] 9c27dcc3-67df-46ea-947d-b2ecdaeeb003 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 980.098759] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39fe0205-b0e2-4759-9182-9d73aa9ca66e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.109860] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 980.109860] env[69475]: value = "task-3508555" [ 
980.109860] env[69475]: _type = "Task" [ 980.109860] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.125967] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508555, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.236818] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ea5e04e-4528-4079-aa78-af61a08c60d6 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 53.109s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.238650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 27.848s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.240990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "78430e6a-b0a3-400b-91c4-effea838274a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.241296] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.003s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.241504] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 980.243570] env[69475]: INFO nova.compute.manager [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Terminating instance [ 980.424534] env[69475]: DEBUG oslo_vmware.api [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508553, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.268694} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.428641] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.428942] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.429200] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.429445] env[69475]: INFO nova.compute.manager [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Took 1.16 seconds to destroy the instance on the hypervisor. [ 980.429788] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.430328] env[69475]: DEBUG nova.compute.manager [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.430473] env[69475]: DEBUG nova.network.neutron [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.483866] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508550, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.580269] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 980.584695] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8795c335-3ff8-4f5a-9729-15971e5fbdde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.594466] env[69475]: DEBUG oslo_vmware.api [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 980.594466] env[69475]: value = "task-3508556" [ 980.594466] env[69475]: _type = "Task" [ 980.594466] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.618773] env[69475]: DEBUG oslo_vmware.api [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508556, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.631886] env[69475]: DEBUG oslo_vmware.api [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508555, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191375} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.632837] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 980.633132] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 980.633436] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.633679] env[69475]: INFO nova.compute.manager [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 980.633950] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.634213] env[69475]: DEBUG nova.compute.manager [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.634324] env[69475]: DEBUG nova.network.neutron [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.640499] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa59f737-48f2-489d-b61d-79766ee9550b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.652038] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1467ab-320f-4034-8458-270becd59566 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.697033] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d338b4b-2f61-4e0e-8556-9deef917e056 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.706704] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9790497-a2d2-4568-b111-108a3f43e7eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.726625] env[69475]: DEBUG nova.compute.provider_tree [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.752378] env[69475]: DEBUG nova.compute.manager [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 980.752974] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 980.753728] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-945e4151-8c3d-4818-aaf2-5ca39f774e2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.767064] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a73833-4489-458d-af31-c713a9209ab3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.804116] env[69475]: WARNING nova.virt.vmwareapi.vmops [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 78430e6a-b0a3-400b-91c4-effea838274a could not be found. [ 980.804293] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 980.805161] env[69475]: INFO nova.compute.manager [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 980.805161] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 980.805161] env[69475]: DEBUG nova.compute.manager [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 980.805161] env[69475]: DEBUG nova.network.neutron [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 980.817348] env[69475]: DEBUG nova.compute.manager [req-7a378faa-5b2e-407d-bd97-f1a60d8a15ef req-e3727e69-2d2c-4efe-a368-66c86906604b service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Received event network-vif-deleted-f9a10762-ba87-425f-9623-1ffdf22c5bb4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 980.817551] env[69475]: INFO nova.compute.manager [req-7a378faa-5b2e-407d-bd97-f1a60d8a15ef req-e3727e69-2d2c-4efe-a368-66c86906604b service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Neutron deleted interface f9a10762-ba87-425f-9623-1ffdf22c5bb4; detaching it from the instance and deleting it from the info cache [ 980.817757] env[69475]: DEBUG nova.network.neutron [req-7a378faa-5b2e-407d-bd97-f1a60d8a15ef req-e3727e69-2d2c-4efe-a368-66c86906604b service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.982289] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508550, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.106198] env[69475]: DEBUG oslo_vmware.api [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508556, 'name': PowerOffVM_Task, 'duration_secs': 0.360673} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.106516] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 981.106721] env[69475]: DEBUG nova.compute.manager [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.107523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3f5c108-06e4-4c6c-97fd-b37bacb2b011 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.161475] env[69475]: DEBUG nova.compute.manager [req-684e2804-ae66-4d6d-a5e4-6cf66334bf81 req-f9a3afa1-80a7-4e55-8458-aa510b2fdcdb service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Received event network-vif-deleted-85c87dc2-a1dc-4c52-9f42-7af24dfa8791 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 981.161475] env[69475]: INFO nova.compute.manager [req-684e2804-ae66-4d6d-a5e4-6cf66334bf81 req-f9a3afa1-80a7-4e55-8458-aa510b2fdcdb service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Neutron deleted interface 85c87dc2-a1dc-4c52-9f42-7af24dfa8791; detaching it from the instance and deleting it from the info cache [ 981.161475] env[69475]: DEBUG nova.network.neutron [req-684e2804-ae66-4d6d-a5e4-6cf66334bf81 req-f9a3afa1-80a7-4e55-8458-aa510b2fdcdb service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.230147] env[69475]: DEBUG nova.scheduler.client.report [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 981.235207] env[69475]: DEBUG nova.network.neutron [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.253781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.254685] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.254948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.255203] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.255468] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.258388] env[69475]: INFO nova.compute.manager [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Terminating instance [ 981.322618] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fecbbb14-c2d6-4540-9dc6-fa46e3b99418 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.336689] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f638318-abed-424b-ba0b-8a4ef7f35e82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.376578] env[69475]: DEBUG nova.compute.manager [req-7a378faa-5b2e-407d-bd97-f1a60d8a15ef req-e3727e69-2d2c-4efe-a368-66c86906604b service nova] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Detach interface failed, port_id=f9a10762-ba87-425f-9623-1ffdf22c5bb4, reason: Instance b1b04eb9-ded6-4425-8a06-0c26c086a09b could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 981.483062] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508550, 'name': CloneVM_Task, 'duration_secs': 1.639589} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.483527] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Created linked-clone VM from snapshot [ 981.484356] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b271609-8c82-42e5-81c3-8e5e95b4fe09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.495139] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploading image b5c1077f-3e5c-4446-aeac-c21a9b838476 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 981.527148] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 981.527148] env[69475]: value = "vm-701069" [ 981.527148] env[69475]: _type = "VirtualMachine" [ 981.527148] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 981.527148] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-187db0c0-eff6-4df8-8174-fabede6e2fc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.537211] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease: (returnval){ [ 981.537211] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e9612c-9e80-ebee-9e1a-5ae73e50fa49" [ 981.537211] env[69475]: _type = "HttpNfcLease" [ 981.537211] env[69475]: } obtained for exporting VM: (result){ [ 981.537211] env[69475]: value = "vm-701069" [ 981.537211] env[69475]: _type = "VirtualMachine" [ 981.537211] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 981.537897] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the lease: (returnval){ [ 981.537897] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e9612c-9e80-ebee-9e1a-5ae73e50fa49" [ 981.537897] env[69475]: _type = "HttpNfcLease" [ 981.537897] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 981.548092] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 981.548092] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e9612c-9e80-ebee-9e1a-5ae73e50fa49" [ 981.548092] env[69475]: _type = "HttpNfcLease" [ 981.548092] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 981.581550] env[69475]: DEBUG nova.network.neutron [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 981.625212] env[69475]: DEBUG oslo_concurrency.lockutils [None req-21d0a7c9-6fad-4d50-bc42-1efff2bec8cb tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.069s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.666522] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1bb0053a-5c7f-4302-891d-318e394b7ff9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.678613] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d98f9f-6e40-455d-9528-d2d77c4c81df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.721648] env[69475]: DEBUG nova.compute.manager [req-684e2804-ae66-4d6d-a5e4-6cf66334bf81 req-f9a3afa1-80a7-4e55-8458-aa510b2fdcdb service nova] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Detach interface failed, port_id=85c87dc2-a1dc-4c52-9f42-7af24dfa8791, reason: Instance 9c27dcc3-67df-46ea-947d-b2ecdaeeb003 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 981.737246] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.017s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.740070] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.041s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.742103] env[69475]: DEBUG nova.objects.instance [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lazy-loading 'resources' on Instance uuid d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.743451] env[69475]: INFO nova.compute.manager [-] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Took 1.31 seconds to deallocate network for instance. [ 981.763076] env[69475]: DEBUG nova.compute.manager [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 981.763626] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 981.764611] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee7b5f8-5536-4a10-8080-45001252c884 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.768911] env[69475]: INFO nova.scheduler.client.report [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Deleted allocations for instance 86647493-8b2c-46bd-94d3-c973e843f778 [ 981.779201] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 981.779584] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49c151df-3467-49ff-8bbd-13b80da204ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.788503] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 981.788503] env[69475]: value = "task-3508558" [ 981.788503] env[69475]: _type = "Task" [ 981.788503] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.798657] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508558, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.879903] env[69475]: DEBUG nova.network.neutron [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.048572] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 982.048572] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e9612c-9e80-ebee-9e1a-5ae73e50fa49" [ 982.048572] env[69475]: _type = "HttpNfcLease" [ 982.048572] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 982.048994] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 982.048994] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e9612c-9e80-ebee-9e1a-5ae73e50fa49" [ 982.048994] env[69475]: _type = "HttpNfcLease" [ 982.048994] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 982.049953] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833f1aad-5a66-4913-a3b7-4d5c6625e81a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.061197] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 982.061470] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 982.125745] env[69475]: INFO nova.compute.manager [-] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Took 1.49 seconds to deallocate network for instance. [ 982.224036] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-70784533-ef28-408b-b41a-1e204d2c646d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.229709] env[69475]: INFO nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Rebuilding instance [ 982.251059] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.280817] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51ba70f5-1de0-4721-96af-600416e4085a tempest-ServerRescueTestJSON-575279690 tempest-ServerRescueTestJSON-575279690-project-member] Lock "86647493-8b2c-46bd-94d3-c973e843f778" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.892s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.301775] env[69475]: DEBUG nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 982.303325] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3a484b-a86c-413b-8513-582fe39251fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.309810] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb 
tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508558, 'name': PowerOffVM_Task, 'duration_secs': 0.329792} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.310440] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 982.310681] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 982.310992] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a592a32c-7b64-49e6-ba27-33c6c3a884cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.382290] env[69475]: INFO nova.compute.manager [-] [instance: 78430e6a-b0a3-400b-91c4-effea838274a] Took 1.58 seconds to deallocate network for instance. [ 982.390793] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 982.391016] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 982.391332] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Deleting the datastore file [datastore1] c9b2f701-a73a-4561-b637-62e3ce98a44f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 982.391606] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08003e9d-c86f-4f97-bc9f-894c9db5f675 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.401096] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for the task: (returnval){ [ 982.401096] env[69475]: value = "task-3508560" [ 982.401096] env[69475]: _type = "Task" [ 982.401096] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.412029] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508560, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.633040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 982.637014] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f27439-f654-4c71-b425-0b1aac515929 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.646293] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572d0605-ad19-4e6d-9667-7c9e5e28e38c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.683832] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59a29a5f-893a-4c39-a629-f3768b821db6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.695817] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec2dc38-8e10-46db-b815-20ff753d00b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.712634] env[69475]: DEBUG nova.compute.provider_tree [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.919442] env[69475]: DEBUG oslo_vmware.api [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Task: {'id': task-3508560, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176644} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.919804] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 982.919989] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 982.921281] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 982.921281] env[69475]: INFO nova.compute.manager [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 982.921281] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 982.921281] env[69475]: DEBUG nova.compute.manager [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 982.921281] env[69475]: DEBUG nova.network.neutron [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 983.215975] env[69475]: DEBUG nova.scheduler.client.report [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 983.323979] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 983.324717] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-992783e1-52ba-4696-b820-8c69deda4ff8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.338842] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 983.338842] env[69475]: value = "task-3508561" [ 983.338842] env[69475]: _type = "Task" [ 983.338842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.346227] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508561, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.418603] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a742834b-b064-426c-96be-734ce620cf19 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "78430e6a-b0a3-400b-91c4-effea838274a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.180s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.725459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.982s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 983.728712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.853s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.732649] env[69475]: INFO nova.compute.claims [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.759925] env[69475]: INFO nova.scheduler.client.report [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Deleted allocations for instance d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3 [ 983.852018] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 983.852801] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 983.853211] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c21ca7ba-9925-45a4-ac42-93b93b651995 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.862421] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 983.863077] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafbdc8c-e896-4c25-8390-1fa7ec8b3ae8 {{(pid=69475) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.946949] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 983.949952] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 983.949952] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 983.949952] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-339ba425-1bb6-4471-a9db-660f0746df7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.958693] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 983.958693] env[69475]: value = "task-3508563" [ 983.958693] env[69475]: _type = "Task" [ 983.958693] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.970579] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508563, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.175489] env[69475]: DEBUG nova.network.neutron [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 984.271862] env[69475]: DEBUG oslo_concurrency.lockutils [None req-018d2012-a4e6-4024-bc57-deee6264d280 tempest-InstanceActionsTestJSON-1259809819 tempest-InstanceActionsTestJSON-1259809819-project-member] Lock "d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.126s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.470813] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508563, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161092} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.471448] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.471850] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 984.472946] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 984.681173] env[69475]: INFO nova.compute.manager [-] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Took 1.76 seconds to deallocate network for instance. [ 984.999493] env[69475]: DEBUG nova.compute.manager [req-08f2655e-bae0-47a6-96be-a4ae72295e28 req-b3358bb9-4171-4617-a02c-d3ea8226c373 service nova] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Received event network-vif-deleted-d77fc39a-89ae-47b6-8770-a620acc4eab3 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 985.115839] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478074f0-9d47-4e9b-93bf-8bae5bb4fa87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.128078] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b769b0-3c95-40ff-9d62-367520ed60cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.162284] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-474270a3-8b72-48f0-b8f6-d346d8726298 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.171000] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258d1fd4-6ba6-49a4-9252-e7711ffe1be9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.187836] env[69475]: DEBUG nova.compute.provider_tree [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 985.191717] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.486375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 985.486913] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.526328] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 985.526593] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 985.526753] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 985.526932] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 985.527591] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 985.527591] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, 
cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 985.527914] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 985.528171] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 985.528726] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 985.528726] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 985.528726] env[69475]: DEBUG nova.virt.hardware [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 985.530316] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b2b2001-63fa-4084-8a01-30030b13e4f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.539243] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb4b69f-cd11-4b85-9510-df63325afcaa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.555029] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c0:81:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'faddd0cb-2c06-43e5-adff-b74e725a50ba', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 985.563582] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 985.563907] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 985.564168] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b17d0b2d-6938-4589-bc81-c6387c89aaff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.587663] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 985.587663] env[69475]: value = "task-3508564" [ 985.587663] env[69475]: _type = "Task" [ 985.587663] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.597145] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508564, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.691739] env[69475]: DEBUG nova.scheduler.client.report [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 985.992815] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 986.101081] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508564, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.197069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.468s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 986.197628] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 986.201826] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.812s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.205980] env[69475]: INFO nova.compute.claims [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 986.514635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.600353] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508564, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.708359] env[69475]: DEBUG nova.compute.utils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 986.709676] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 986.709896] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 986.766617] env[69475]: DEBUG nova.policy [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82fb5348c4484685ba3d0589310fb68a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd25a22195d0c4370a481a242a18f430a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 987.045383] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Successfully created port: 16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.101700] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508564, 'name': CreateVM_Task, 'duration_secs': 1.423858} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.102072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.102698] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.102824] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.103265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 987.103510] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6787121-5e5a-4f0a-87e7-9e396675168a {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.109583] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 987.109583] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bea9a7-3e42-9e8a-f237-158e1616a903" [ 987.109583] env[69475]: _type = "Task" [ 987.109583] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.118871] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bea9a7-3e42-9e8a-f237-158e1616a903, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.216822] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 987.571438] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1118cc9-421c-4b19-9452-5360a3f9059f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.580901] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af1de4ee-31f0-4860-97e8-7f1e150d8eb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.615573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f0356d-a38e-4b5f-a596-7bc3f002d342 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.625895] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bea9a7-3e42-9e8a-f237-158e1616a903, 'name': SearchDatastore_Task, 'duration_secs': 0.01365} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.626311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.626556] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.626790] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.626933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.627126] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.628346] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06ba8ed-cbc9-4069-a21f-a486fd9d1c13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.632022] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9102a339-a9da-42a8-b0fc-750aebff59af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.645887] env[69475]: DEBUG nova.compute.provider_tree [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.648125] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.648308] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.649259] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52705508-a908-4cf3-83c8-6ac6c7ebe099 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.655852] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 987.655852] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268f3aa-2d42-c8cf-a841-b1bfbb2919c8" [ 987.655852] env[69475]: _type = "Task" [ 987.655852] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.665919] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268f3aa-2d42-c8cf-a841-b1bfbb2919c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.939281] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.939281] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.151185] env[69475]: DEBUG nova.scheduler.client.report [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 988.167999] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268f3aa-2d42-c8cf-a841-b1bfbb2919c8, 'name': SearchDatastore_Task, 'duration_secs': 0.010245} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.169468] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22d4d28b-f5a1-4ef0-a567-e286d0db7102 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.176674] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 988.176674] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52de9676-4ded-1bf5-688b-5ab8d64dc52c" [ 988.176674] env[69475]: _type = "Task" [ 988.176674] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.186871] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52de9676-4ded-1bf5-688b-5ab8d64dc52c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.230606] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 988.257523] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 988.257795] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 988.257990] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 988.258192] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 
tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 988.258335] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 988.258482] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 988.258693] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 988.258850] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 988.259053] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 988.259228] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 988.259403] env[69475]: DEBUG nova.virt.hardware [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 988.260310] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3c24732-9d26-4c54-ae1d-c8f42184ff64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.269902] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d80f76c-180c-40a5-9a46-89af4085f59e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.442413] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 988.449316] env[69475]: DEBUG nova.compute.manager [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Received event network-vif-plugged-16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.449316] env[69475]: DEBUG oslo_concurrency.lockutils [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] Acquiring lock "211f895a-bba5-4f10-9296-0d461af49f98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.449563] env[69475]: DEBUG oslo_concurrency.lockutils [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] Lock "211f895a-bba5-4f10-9296-0d461af49f98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.449614] env[69475]: DEBUG oslo_concurrency.lockutils [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] Lock "211f895a-bba5-4f10-9296-0d461af49f98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.449835] env[69475]: DEBUG nova.compute.manager [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] No waiting events found dispatching network-vif-plugged-16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 988.450440] env[69475]: WARNING nova.compute.manager [req-14d516bd-b883-472f-be8d-fffb661f9495 req-565fef8c-5a52-4d6d-8939-030d039f4f0f service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Received unexpected event network-vif-plugged-16c03d8c-ab68-4516-b540-a94b1d4caf49 for instance with vm_state building and task_state spawning. [ 988.548577] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Successfully updated port: 16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 988.657261] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.456s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.657795] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 988.660403] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.818s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.660643] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.660744] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 988.661026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.666s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.662525] env[69475]: INFO nova.compute.claims [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 988.665729] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d5f8df-4836-4531-b3fb-15c9b97b5f8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.676725] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bb19027-036a-496c-9acd-18a8e7086e7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.690257] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52de9676-4ded-1bf5-688b-5ab8d64dc52c, 'name': SearchDatastore_Task, 'duration_secs': 0.010865} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.698622] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.698787] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.699111] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0647f6a5-f88b-4fe1-9d33-febffe601fc6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.701647] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2ae71c-e8fa-4bf7-a1b9-7a6be1b073f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.710685] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485ba3fe-1f91-4c9a-bda8-002126b38535 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.715423] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 988.715423] env[69475]: value = "task-3508565" [ 988.715423] env[69475]: _type = "Task" [ 988.715423] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.743668] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178406MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 988.743845] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.750143] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508565, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.970498] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 989.052962] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.052962] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.052962] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 989.167311] env[69475]: DEBUG nova.compute.utils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 989.168600] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 989.168782] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 989.221937] env[69475]: DEBUG nova.policy [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 989.227023] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508565, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494336} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.227992] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.227992] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.228205] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d66d550a-f2a5-46b1-b2cd-f1d759de0c64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.238447] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 989.238447] env[69475]: value = "task-3508566" [ 989.238447] env[69475]: _type = "Task" [ 989.238447] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.246858] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508566, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.492017] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Successfully created port: 7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 989.595050] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 989.673888] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 989.751512] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508566, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074256} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.757054] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 989.758606] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be75d658-229a-44cc-84b0-097c1b9d40a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.783396] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 989.786396] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e022b256-a63c-4618-a6c2-d56fa7dd35c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.802699] env[69475]: DEBUG nova.network.neutron [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Updating instance_info_cache with network_info: [{"id": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "address": "fa:16:3e:3a:8e:76", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": 
"tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16c03d8c-ab", "ovs_interfaceid": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.815095] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 989.815095] env[69475]: value = "task-3508567" [ 989.815095] env[69475]: _type = "Task" [ 989.815095] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.825548] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508567, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.035926] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0aa57d0-b312-4f4f-b098-a1896175f6c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.044585] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae56d3f6-ddbd-48f3-b757-011eca22e2eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.078366] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d1b4cc-5e82-4f03-b0a3-ad09b29008c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.088671] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072f1588-a88f-4da2-907f-2be5c63d5278 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.103232] env[69475]: DEBUG nova.compute.provider_tree [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.108939] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 990.109847] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c11238c-ef6b-4fa8-8a3a-4e88aaaf518f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.116940] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 990.117121] env[69475]: ERROR oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk due to incomplete transfer. 
[ 990.117338] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ac7de04f-3b85-4da3-b813-aae857bfeea2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.125664] env[69475]: DEBUG oslo_vmware.rw_handles [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52689aad-769f-35b7-4053-04263b50e17d/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 990.125862] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Uploaded image b5c1077f-3e5c-4446-aeac-c21a9b838476 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 990.128013] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 990.128267] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0602e2f2-5867-4fe1-8f9c-eec6860e9a0e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.136254] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 990.136254] env[69475]: value = "task-3508568" [ 990.136254] env[69475]: _type = "Task" [ 990.136254] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.144485] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508568, 'name': Destroy_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.305727] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.306093] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Instance network_info: |[{"id": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "address": "fa:16:3e:3a:8e:76", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16c03d8c-ab", "ovs_interfaceid": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 990.306613] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:8e:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16c03d8c-ab68-4516-b540-a94b1d4caf49', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 990.314964] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 990.315236] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 990.315466] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-406727ca-2a84-400d-bd50-539d772eb93d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.342177] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508567, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.343904] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 990.343904] env[69475]: value = "task-3508569" [ 990.343904] env[69475]: _type = "Task" [ 990.343904] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.352546] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508569, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.498063] env[69475]: DEBUG nova.compute.manager [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Received event network-changed-16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.498211] env[69475]: DEBUG nova.compute.manager [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Refreshing instance network info cache due to event network-changed-16c03d8c-ab68-4516-b540-a94b1d4caf49. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.498376] env[69475]: DEBUG oslo_concurrency.lockutils [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] Acquiring lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.498518] env[69475]: DEBUG oslo_concurrency.lockutils [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] Acquired lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.498677] env[69475]: DEBUG nova.network.neutron [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Refreshing network info cache for port 16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.606824] env[69475]: DEBUG nova.scheduler.client.report [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 990.650757] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508568, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.689397] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 990.713516] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 990.713825] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 990.714035] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 990.714318] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 990.714529] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 990.714707] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 990.714938] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 990.715146] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 990.715356] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] 
Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 990.715557] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 990.715756] env[69475]: DEBUG nova.virt.hardware [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 990.716868] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8489f763-c37f-409d-907f-1e3b62b98f9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.725952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c8cf324-ba9e-4eb0-935d-69b2ae732fac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.841757] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508567, 'name': ReconfigVM_Task, 'duration_secs': 0.7429} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.842090] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Reconfigured VM instance instance-00000057 to attach disk [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac/dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.842817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4257930-9b7c-41c2-98f4-3bbad53472ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.856950] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508569, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.858094] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 990.858094] env[69475]: value = "task-3508570" [ 990.858094] env[69475]: _type = "Task" [ 990.858094] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.866757] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508570, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.984051] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Successfully updated port: 7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 991.112878] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 991.113466] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 991.115944] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.548s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 991.116179] env[69475]: DEBUG nova.objects.instance [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lazy-loading 'resources' on Instance uuid eadfea6c-3fce-4f54-b889-d994d61ec14f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 991.147703] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508568, 'name': Destroy_Task, 'duration_secs': 0.755136} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.147938] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroyed the VM [ 991.148189] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 991.148478] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-13ab206d-72e1-4b4f-89b2-2e9ab07d24e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.155386] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 991.155386] env[69475]: value = "task-3508571" [ 991.155386] env[69475]: _type = "Task" [ 991.155386] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.165618] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508571, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.196620] env[69475]: DEBUG nova.network.neutron [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Updated VIF entry in instance network info cache for port 16c03d8c-ab68-4516-b540-a94b1d4caf49. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 991.197281] env[69475]: DEBUG nova.network.neutron [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Updating instance_info_cache with network_info: [{"id": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "address": "fa:16:3e:3a:8e:76", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16c03d8c-ab", "ovs_interfaceid": "16c03d8c-ab68-4516-b540-a94b1d4caf49", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.354774] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508569, 'name': CreateVM_Task, 'duration_secs': 0.57923} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.355035] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 991.355678] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.355842] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.356283] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 991.356435] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc963a9-f49c-4ccb-9774-0e38910ffb9b {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.362533] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 991.362533] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d15b8d-b238-c4a5-e783-3e820affea5b" [ 991.362533] env[69475]: _type = "Task" [ 991.362533] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.369268] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508570, 'name': Rename_Task, 'duration_secs': 0.163011} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.369426] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.369557] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b8c7e08-6881-4b7d-8253-70bcd00462c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.374943] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d15b8d-b238-c4a5-e783-3e820affea5b, 'name': SearchDatastore_Task, 'duration_secs': 0.010859} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.375450] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.375678] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.375908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.376200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.376240] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.376798] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae924e65-8105-463b-9f78-50adb8ce81b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.379468] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 991.379468] env[69475]: value = "task-3508572" [ 991.379468] env[69475]: _type = "Task" [ 991.379468] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.387023] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508572, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.388047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.388231] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.388898] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aedb4cf-ae05-4521-b51f-d1e4497f0633 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.393246] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 991.393246] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52362ce8-05a1-b4ff-cb7a-fe2f164a361a" [ 991.393246] env[69475]: _type = "Task" [ 991.393246] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.401392] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52362ce8-05a1-b4ff-cb7a-fe2f164a361a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.486923] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.487090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 991.487249] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 991.621155] env[69475]: DEBUG nova.compute.utils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 991.626083] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 991.626083] env[69475]: DEBUG nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 991.667260] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508571, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.668674] env[69475]: DEBUG nova.policy [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d4323c195b24245a75109e165f900f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4e6dd9c026624896ae4de7fab35720d8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 991.699726] env[69475]: DEBUG oslo_concurrency.lockutils [req-de4636fb-f432-45ea-a2ce-92e7e232b00e req-57e1353f-f8ea-4ba0-ac56-ecf47aa94d1c service nova] Releasing lock "refresh_cache-211f895a-bba5-4f10-9296-0d461af49f98" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.890155] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508572, 'name': PowerOnVM_Task, 'duration_secs': 0.503128} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.890432] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.890637] env[69475]: DEBUG nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 991.891434] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08f4dd9-75f1-434f-920a-e7f5a325b53b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.918595] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52362ce8-05a1-b4ff-cb7a-fe2f164a361a, 'name': SearchDatastore_Task, 'duration_secs': 0.009794} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.918595] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0055765-907f-4ee6-9ec2-27113bc4771d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.922226] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 991.922226] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae61d3-5f80-e4fa-a3ab-71792f0b6081" [ 991.922226] env[69475]: _type = "Task" [ 991.922226] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.937854] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae61d3-5f80-e4fa-a3ab-71792f0b6081, 'name': SearchDatastore_Task, 'duration_secs': 0.009767} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.941447] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 991.941767] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 211f895a-bba5-4f10-9296-0d461af49f98/211f895a-bba5-4f10-9296-0d461af49f98.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 991.942239] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dc864967-dbcf-4413-b426-3dddccf4c574 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.948632] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 991.948632] env[69475]: value = "task-3508573" [ 991.948632] env[69475]: _type = "Task" [ 991.948632] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.956907] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508573, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.971156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc35fa3b-5d78-4a8b-8863-65656e6fd91b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.974397] env[69475]: DEBUG nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Successfully created port: 886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.981894] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-604e79fb-56af-4d5f-a168-c056efa0b745 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.014410] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac6ec917-eabb-409a-bb2b-3c6eac9fb5b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.022749] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf99f0ab-e943-453b-a9d1-ab541334a389 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.038487] env[69475]: DEBUG nova.compute.provider_tree [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 992.044470] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.127237] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 992.168506] env[69475]: DEBUG oslo_vmware.api [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508571, 'name': RemoveSnapshot_Task, 'duration_secs': 0.57658} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.169456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 992.169715] env[69475]: INFO nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 14.81 seconds to snapshot the instance on the hypervisor. [ 992.186885] env[69475]: DEBUG nova.network.neutron [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Updating instance_info_cache with network_info: [{"id": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "address": "fa:16:3e:61:e8:6a", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcffc2d-4f", "ovs_interfaceid": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.416523] env[69475]: INFO nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] bringing vm to original state: 'stopped' [ 992.460552] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508573, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.541449] env[69475]: DEBUG nova.scheduler.client.report [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 992.666074] env[69475]: DEBUG nova.compute.manager [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Received event network-vif-plugged-7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.666303] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Acquiring lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 992.666527] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.666973] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.667210] env[69475]: DEBUG nova.compute.manager [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] No waiting events found dispatching network-vif-plugged-7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 992.667461] env[69475]: WARNING nova.compute.manager [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Received unexpected event network-vif-plugged-7dcffc2d-4fd4-4b04-9913-da880fac60ed for instance with vm_state building and task_state spawning. 
[ 992.667593] env[69475]: DEBUG nova.compute.manager [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Received event network-changed-7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 992.667751] env[69475]: DEBUG nova.compute.manager [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Refreshing instance network info cache due to event network-changed-7dcffc2d-4fd4-4b04-9913-da880fac60ed. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 992.667918] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Acquiring lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.689070] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.689365] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Instance network_info: |[{"id": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "address": "fa:16:3e:61:e8:6a", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcffc2d-4f", "ovs_interfaceid": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 992.689623] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Acquired lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.689813] env[69475]: DEBUG nova.network.neutron [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Refreshing network info cache for port 
7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 992.691014] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:61:e8:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7dcffc2d-4fd4-4b04-9913-da880fac60ed', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 992.699043] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 992.701883] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 992.702564] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-651009cf-826e-46ad-b01f-e8d32850c8e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.726775] env[69475]: DEBUG nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Found 3 images (rotation: 2) {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 992.726978] env[69475]: DEBUG nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Rotating out 1 backups {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 992.727157] env[69475]: DEBUG nova.compute.manager [None req-25819551-1e51-4751-80b8-c671016d2c3e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleting image 71a76a24-1a96-4056-b949-89270aaca820 {{(pid=69475) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 992.730448] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 992.730448] env[69475]: value = "task-3508574" [ 992.730448] env[69475]: _type = "Task" [ 992.730448] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.738918] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508574, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.960138] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508573, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627486} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.960442] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 211f895a-bba5-4f10-9296-0d461af49f98/211f895a-bba5-4f10-9296-0d461af49f98.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 992.960933] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 992.961250] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-294b6ff1-4a64-4d56-b999-112b20e3e46e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.968561] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 992.968561] env[69475]: value = "task-3508575" [ 992.968561] env[69475]: _type = "Task" [ 992.968561] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.976441] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508575, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.049173] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.933s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.051999] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.743s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.052291] env[69475]: DEBUG nova.objects.instance [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lazy-loading 'resources' on Instance uuid 02ba199b-a7dc-421c-a14a-b562da275377 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 993.061434] env[69475]: DEBUG nova.network.neutron [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Updated VIF entry in instance network info cache for port 7dcffc2d-4fd4-4b04-9913-da880fac60ed. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 993.061821] env[69475]: DEBUG nova.network.neutron [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Updating instance_info_cache with network_info: [{"id": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "address": "fa:16:3e:61:e8:6a", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7dcffc2d-4f", "ovs_interfaceid": "7dcffc2d-4fd4-4b04-9913-da880fac60ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.085950] env[69475]: INFO nova.scheduler.client.report [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted allocations for instance eadfea6c-3fce-4f54-b889-d994d61ec14f [ 993.137980] env[69475]: DEBUG nova.compute.manager [None 
req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 993.158640] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 993.158964] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 993.159075] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 993.159235] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 993.159380] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 993.159520] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 993.159729] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 993.159870] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:485}} [ 993.160060] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 993.160376] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 993.160442] env[69475]: DEBUG nova.virt.hardware [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 993.161284] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4024aee3-a62d-424e-870d-afdca635fb8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.169156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edfc5b21-bf6f-42e4-82a4-f2c8eaad84ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.239775] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508574, 'name': CreateVM_Task, 'duration_secs': 0.366313} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.240080] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 993.240683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.240787] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.241133] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 993.241380] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44bf5ab0-2448-4a5d-a5b3-cb39938936aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.245612] env[69475]: DEBUG oslo_vmware.api [None 
req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 993.245612] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52963b2d-d9ab-177d-2db4-91503b1977eb" [ 993.245612] env[69475]: _type = "Task" [ 993.245612] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.252936] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52963b2d-d9ab-177d-2db4-91503b1977eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.416370] env[69475]: DEBUG nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Successfully updated port: 886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.425565] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.425813] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.425990] env[69475]: DEBUG nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 993.426854] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d38927-6214-45c8-8262-f8d2d30302d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.433959] env[69475]: DEBUG nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 993.477366] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508575, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068466} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.477692] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 993.478353] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd06465-7118-4f58-86d1-840e25c38aaa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.500681] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] 211f895a-bba5-4f10-9296-0d461af49f98/211f895a-bba5-4f10-9296-0d461af49f98.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.500965] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a4a83fa-a6c0-496a-97ae-4ad053eaf386 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.520931] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 993.520931] env[69475]: value = "task-3508576" [ 993.520931] env[69475]: _type = "Task" [ 993.520931] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.530542] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508576, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.565388] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1e0cca0-e736-40f1-a55d-85a0f9c11a1b req-8ac54cd1-98c0-4d6d-abb1-920decaf612b service nova] Releasing lock "refresh_cache-e8657a44-d786-4fa6-b39c-28fc71415ce8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.597040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b932636d-c33b-4682-a96d-7873dd06e01e tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "eadfea6c-3fce-4f54-b889-d994d61ec14f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.747s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.759729] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52963b2d-d9ab-177d-2db4-91503b1977eb, 'name': SearchDatastore_Task, 'duration_secs': 0.009121} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.759729] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.759969] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 993.760229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.760423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.760659] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 993.760863] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f086e82-d42e-4808-9377-2f885624e10a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.773108] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 993.773428] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 993.774173] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bba38121-4a06-4eb2-9b70-095ff55ea416 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.779229] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 993.779229] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248390c-5540-6e61-fea3-5c6397a1dc50" [ 993.779229] env[69475]: _type = "Task" [ 993.779229] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.789993] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248390c-5540-6e61-fea3-5c6397a1dc50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.850084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e13dbdb-727d-4940-acd4-92353d8e757c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.856841] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-441251b6-5088-491e-98c6-50d9058448f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.887209] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80658533-beda-40f7-90c2-6117f16c0f43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.894703] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54616bdf-47b7-405b-80e0-dab696c7d5e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.907822] env[69475]: DEBUG nova.compute.provider_tree [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.921048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.921048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.921048] env[69475]: DEBUG 
nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.937435] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 993.937824] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-551c2ec2-530f-4260-8e7f-aaa06cdde9ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.946183] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 993.946183] env[69475]: value = "task-3508577" [ 993.946183] env[69475]: _type = "Task" [ 993.946183] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.953239] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508577, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.034875] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508576, 'name': ReconfigVM_Task, 'duration_secs': 0.27214} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.035166] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Reconfigured VM instance instance-00000058 to attach disk [datastore1] 211f895a-bba5-4f10-9296-0d461af49f98/211f895a-bba5-4f10-9296-0d461af49f98.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.035842] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-579f37d7-e60c-4b08-93c0-7a1606c03a51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.042647] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 994.042647] env[69475]: value = "task-3508578" [ 994.042647] env[69475]: _type = "Task" [ 994.042647] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.050548] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508578, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.291339] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248390c-5540-6e61-fea3-5c6397a1dc50, 'name': SearchDatastore_Task, 'duration_secs': 0.0084} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.292069] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2066de9d-038b-4b38-801f-53a8bee850e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.297123] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 994.297123] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523d3d77-52cb-3921-7d26-9b6de2cb81d1" [ 994.297123] env[69475]: _type = "Task" [ 994.297123] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.304982] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523d3d77-52cb-3921-7d26-9b6de2cb81d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.411880] env[69475]: DEBUG nova.scheduler.client.report [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.450534] env[69475]: DEBUG nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.458055] env[69475]: DEBUG oslo_vmware.api [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508577, 'name': PowerOffVM_Task, 'duration_secs': 0.205976} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.458353] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 994.458560] env[69475]: DEBUG nova.compute.manager [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 994.459329] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce19280-2ed5-453f-9ab6-c2215d723f45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.552172] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508578, 'name': Rename_Task, 'duration_secs': 0.146283} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.552454] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 994.552715] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-987d0dde-0269-4501-ac4d-93ec45f1ee9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.558789] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 994.558789] env[69475]: value = "task-3508579" [ 994.558789] env[69475]: _type = "Task" [ 994.558789] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.566614] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508579, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.569808] env[69475]: DEBUG nova.network.neutron [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Updating instance_info_cache with network_info: [{"id": "886a1220-72c0-4395-8f70-1ab633f634ff", "address": "fa:16:3e:9c:a8:22", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap886a1220-72", "ovs_interfaceid": "886a1220-72c0-4395-8f70-1ab633f634ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.811023] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523d3d77-52cb-3921-7d26-9b6de2cb81d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009105} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.811023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.811023] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e8657a44-d786-4fa6-b39c-28fc71415ce8/e8657a44-d786-4fa6-b39c-28fc71415ce8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.811023] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89073ab2-e09c-43ff-8339-2537eec665fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.816643] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 994.816643] env[69475]: value = "task-3508580" [ 994.816643] env[69475]: _type = "Task" [ 994.816643] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.824622] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508580, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.890994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.891635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.891943] env[69475]: DEBUG nova.compute.manager [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 994.892962] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0ce9f2-081a-4065-a562-54fa00e9ef02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.899746] env[69475]: DEBUG nova.compute.manager [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 994.900613] env[69475]: DEBUG nova.objects.instance [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.916960] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.919188] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.188s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.919408] env[69475]: DEBUG nova.objects.instance [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'pci_requests' on Instance uuid 
e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.927142] env[69475]: DEBUG nova.compute.manager [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Received event network-vif-plugged-886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 994.927424] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Acquiring lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 994.927567] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.927723] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.928330] env[69475]: DEBUG nova.compute.manager [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] No waiting events found dispatching network-vif-plugged-886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 994.928330] env[69475]: WARNING nova.compute.manager [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Received unexpected event network-vif-plugged-886a1220-72c0-4395-8f70-1ab633f634ff for instance with vm_state building and task_state spawning. [ 994.928330] env[69475]: DEBUG nova.compute.manager [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Received event network-changed-886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 994.928330] env[69475]: DEBUG nova.compute.manager [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Refreshing instance network info cache due to event network-changed-886a1220-72c0-4395-8f70-1ab633f634ff. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 994.928716] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Acquiring lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.956020] env[69475]: INFO nova.scheduler.client.report [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Deleted allocations for instance 02ba199b-a7dc-421c-a14a-b562da275377 [ 994.973547] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.548s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.070678] env[69475]: DEBUG oslo_vmware.api [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508579, 'name': PowerOnVM_Task, 'duration_secs': 0.424111} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.070818] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 995.070945] env[69475]: INFO nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Took 6.84 seconds to spawn the instance on the hypervisor. 
[ 995.071161] env[69475]: DEBUG nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.071677] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.071978] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Instance network_info: |[{"id": "886a1220-72c0-4395-8f70-1ab633f634ff", "address": "fa:16:3e:9c:a8:22", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap886a1220-72", "ovs_interfaceid": "886a1220-72c0-4395-8f70-1ab633f634ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 995.072805] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ef8c9ce-25bf-463e-8319-a7ad69396af5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.076033] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Acquired lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.076033] env[69475]: DEBUG nova.network.neutron [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Refreshing network info cache for port 886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.077179] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:a8:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'a8b99a46-3e7f-4ef1-9e45-58e6cd17f210', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '886a1220-72c0-4395-8f70-1ab633f634ff', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 995.084653] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.087699] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 995.089145] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41e7c2f4-807e-440b-824a-159d2eb8360c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.117023] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 995.117023] env[69475]: value = "task-3508581" [ 995.117023] env[69475]: _type = "Task" [ 995.117023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.124237] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508581, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.326892] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508580, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500868} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.327168] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e8657a44-d786-4fa6-b39c-28fc71415ce8/e8657a44-d786-4fa6-b39c-28fc71415ce8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.327381] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.327632] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3a617ed6-e938-4513-b2da-2ebbf0f1a348 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.333904] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 995.333904] env[69475]: value = "task-3508582" [ 995.333904] env[69475]: _type = "Task" [ 995.333904] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.342591] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508582, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.345475] env[69475]: DEBUG nova.network.neutron [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Updated VIF entry in instance network info cache for port 886a1220-72c0-4395-8f70-1ab633f634ff. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 995.345848] env[69475]: DEBUG nova.network.neutron [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Updating instance_info_cache with network_info: [{"id": "886a1220-72c0-4395-8f70-1ab633f634ff", "address": "fa:16:3e:9c:a8:22", "network": {"id": "49b579f0-c5a9-487e-b469-7e6420355dce", "bridge": "br-int", "label": "tempest-ImagesTestJSON-83616973-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4e6dd9c026624896ae4de7fab35720d8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8b99a46-3e7f-4ef1-9e45-58e6cd17f210", "external-id": "nsx-vlan-transportzone-704", "segmentation_id": 704, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap886a1220-72", "ovs_interfaceid": "886a1220-72c0-4395-8f70-1ab633f634ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 995.422752] env[69475]: DEBUG nova.objects.instance [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'numa_topology' on Instance uuid e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.465074] env[69475]: DEBUG oslo_concurrency.lockutils [None req-53bd4ffb-6e65-48ab-b8ce-920e250d447e tempest-ServerRescueTestJSONUnderV235-122090117 tempest-ServerRescueTestJSONUnderV235-122090117-project-member] Lock "02ba199b-a7dc-421c-a14a-b562da275377" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.017s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 995.483836] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 995.620045] env[69475]: INFO nova.compute.manager [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Took 34.76 seconds to build instance. [ 995.629035] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508581, 'name': CreateVM_Task, 'duration_secs': 0.386165} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.629035] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.629035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.629035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.629035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 995.629035] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-644d8223-025b-46d1-9623-99ddcfcac650 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.632601] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 995.632601] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c35dba-c06d-38fb-0a01-142258c75b9e" [ 995.632601] env[69475]: _type = "Task" [ 995.632601] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.645648] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c35dba-c06d-38fb-0a01-142258c75b9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.846357] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508582, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072957} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.846624] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.847518] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6be6147-a2a3-4ea1-be90-06b7b5b8d3b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.851855] env[69475]: DEBUG oslo_concurrency.lockutils [req-d8e53e34-4d7d-4a90-b2da-546ed9140377 req-9d924ec0-907d-4366-b4ba-cbee96314848 service nova] Releasing lock "refresh_cache-f222cc16-7581-41ff-ae7c-0538c7b3c721" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.874093] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] e8657a44-d786-4fa6-b39c-28fc71415ce8/e8657a44-d786-4fa6-b39c-28fc71415ce8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.874093] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9c5730e6-b365-43d2-9d91-536d3ad3d15f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.892260] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 995.892260] env[69475]: value = "task-3508583" [ 995.892260] env[69475]: _type = "Task" [ 995.892260] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.900662] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508583, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.909616] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 995.909616] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5437ecd9-8ee1-4110-8cd0-bdb65d643080 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.914591] env[69475]: DEBUG oslo_vmware.api [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 995.914591] env[69475]: value = "task-3508584" [ 995.914591] env[69475]: _type = "Task" [ 995.914591] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.924527] env[69475]: DEBUG oslo_vmware.api [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508584, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.924957] env[69475]: INFO nova.compute.claims [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 996.123102] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c3e6e04-a84a-4023-82af-932193800355 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.275s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.142531] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c35dba-c06d-38fb-0a01-142258c75b9e, 'name': SearchDatastore_Task, 'duration_secs': 0.009267} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.142827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.143082] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.143368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.143518] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.143702] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.143972] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa630e76-62be-4473-879c-71e6ca863694 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.153400] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.154050] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.154322] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59bef1c8-f01f-42dc-8160-e3bfc22d3efb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.160475] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 996.160475] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ccb83-e7aa-c512-89cc-9dde8540f512" [ 996.160475] env[69475]: _type = "Task" [ 996.160475] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.168863] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ccb83-e7aa-c512-89cc-9dde8540f512, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.402469] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508583, 'name': ReconfigVM_Task, 'duration_secs': 0.28135} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.402855] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Reconfigured VM instance instance-00000059 to attach disk [datastore1] e8657a44-d786-4fa6-b39c-28fc71415ce8/e8657a44-d786-4fa6-b39c-28fc71415ce8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.403817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-434a44cf-a4d4-4175-a396-fb7e037c3261 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.405187] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.405405] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.405607] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.405773] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.405935] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.407968] env[69475]: INFO nova.compute.manager [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Terminating instance [ 996.415133] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 996.415133] env[69475]: value = "task-3508585" [ 996.415133] env[69475]: _type = "Task" [ 996.415133] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.423951] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508585, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.426812] env[69475]: DEBUG oslo_vmware.api [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508584, 'name': PowerOffVM_Task, 'duration_secs': 0.366421} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.427053] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 996.427246] env[69475]: DEBUG nova.compute.manager [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 996.427974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c7e620-9063-4b52-812a-88576fb6f0b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.672327] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ccb83-e7aa-c512-89cc-9dde8540f512, 'name': SearchDatastore_Task, 'duration_secs': 0.008591} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.673116] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d9a58b0-5ed5-4fe5-8b5f-3601c1dc3ed0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.678582] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 996.678582] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52afee50-d375-b051-35fc-86ff4c735dfe" [ 996.678582] env[69475]: _type = "Task" [ 996.678582] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.686727] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52afee50-d375-b051-35fc-86ff4c735dfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.805255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "211f895a-bba5-4f10-9296-0d461af49f98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.805255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.805255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "211f895a-bba5-4f10-9296-0d461af49f98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 996.805255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 996.805255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.808765] env[69475]: INFO nova.compute.manager [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Terminating instance [ 996.911228] env[69475]: DEBUG nova.compute.manager [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 996.911496] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 996.912851] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57a6d17-6c97-4be3-ae35-eb536caee87b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.932138] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 996.932440] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508585, 'name': Rename_Task, 'duration_secs': 0.134971} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.932649] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0741f740-9113-416b-9f00-67c7caa636cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.934221] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 996.934446] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-52223a8d-d44f-4deb-81f2-d404c46c42ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.947391] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 996.947391] env[69475]: value = "task-3508587" [ 996.947391] env[69475]: _type = "Task" [ 996.947391] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.947819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fdca7e13-90a8-46e3-9391-6257cf2e9df4 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.056s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 996.958706] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508587, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.000392] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.001414] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.001414] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore1] dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.001414] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-135bab62-c41f-4fe0-88bd-94fec877d9bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.011817] env[69475]: DEBUG oslo_vmware.api [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 997.011817] env[69475]: value = "task-3508588" [ 997.011817] env[69475]: _type = "Task" [ 997.011817] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.020936] env[69475]: DEBUG oslo_vmware.api [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508588, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.195104] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52afee50-d375-b051-35fc-86ff4c735dfe, 'name': SearchDatastore_Task, 'duration_secs': 0.00952} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.195393] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 997.195651] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f222cc16-7581-41ff-ae7c-0538c7b3c721/f222cc16-7581-41ff-ae7c-0538c7b3c721.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 997.195920] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9931308f-1f5c-41a3-9c53-093008228861 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.202260] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 997.202260] env[69475]: value = "task-3508589" [ 997.202260] env[69475]: _type = "Task" [ 997.202260] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.210653] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508589, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.316809] env[69475]: DEBUG nova.compute.manager [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 997.317176] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 997.318571] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a38355-2d68-46bc-b91c-6d35f47f171c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.330382] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 997.331686] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6627242d-25a5-477c-81f4-16aadc6e0ad0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.333883] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b3d73-b820-40a0-965d-8462f265deb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.343290] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f51367-9c00-42bd-86e2-1c4ffc391347 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.347715] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 997.347715] env[69475]: value = "task-3508590" [ 997.347715] env[69475]: _type = "Task" [ 997.347715] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.380369] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ad2712-6bef-4e5f-a93a-1dd6bab149d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.386157] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508590, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.391187] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6843d5f0-4609-406d-b608-e5534ffe21f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.405683] env[69475]: DEBUG nova.compute.provider_tree [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.461499] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508587, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.524871] env[69475]: DEBUG oslo_vmware.api [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508588, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206674} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.525333] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 997.525574] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 997.525876] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 997.526364] env[69475]: INFO nova.compute.manager [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Took 0.61 seconds to destroy the instance on the hypervisor. [ 997.526703] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 997.527024] env[69475]: DEBUG nova.compute.manager [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 997.527185] env[69475]: DEBUG nova.network.neutron [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 997.714189] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508589, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.856978] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508590, 'name': PowerOffVM_Task, 'duration_secs': 0.232197} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.857635] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.857941] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 997.858320] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad5e990f-346b-4c6d-b307-ced603d2bc7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.911048] env[69475]: DEBUG nova.scheduler.client.report [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.923688] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 997.923938] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 
tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 997.924160] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleting the datastore file [datastore1] 211f895a-bba5-4f10-9296-0d461af49f98 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 997.924526] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3ebec299-a79f-4d98-8406-ce10674a1aef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.931543] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 997.931543] env[69475]: value = "task-3508592" [ 997.931543] env[69475]: _type = "Task" [ 997.931543] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.946861] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.958555] env[69475]: DEBUG oslo_vmware.api [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508587, 'name': PowerOnVM_Task, 'duration_secs': 0.614738} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.958849] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.959051] env[69475]: INFO nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Took 7.27 seconds to spawn the instance on the hypervisor. 
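The recurring "Waiting for the task ... to complete" / "progress is N%" / "completed successfully" triplets in the entries above (Rename_Task, CopyVirtualDisk_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) are produced by oslo.vmware's task polling: the driver submits a vCenter task and then loops on its state until it succeeds or faults. Below is a minimal stdlib-only sketch of that polling pattern, not the actual oslo.vmware implementation; get_task_info is a hypothetical stand-in for the PropertyCollector call that reads the task's current state and progress.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls; the real code drives this from a looping call


    class TaskFailed(Exception):
        """Raised when the vCenter-style task ends in an error state."""


    def wait_for_task(get_task_info, task_id):
        """Poll a task until it finishes.

        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 40} for the given task id.
        """
        while True:
            info = get_task_info(task_id)
            state = info['state']
            if state == 'success':
                return info
            if state == 'error':
                raise TaskFailed(info.get('error', 'unknown fault'))
            # Mirrors the "progress is N%" DEBUG lines seen in the log.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)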
[ 997.959711] env[69475]: DEBUG nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 997.960496] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e850fcfe-3396-4a42-8f29-d7fa6b2bc8fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.965389] env[69475]: DEBUG nova.compute.manager [req-68f6a5e8-0f50-428b-a15b-147a27b14c31 req-2eabc943-a938-4266-a096-d50ae952b217 service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Received event network-vif-deleted-faddd0cb-2c06-43e5-adff-b74e725a50ba {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.965599] env[69475]: INFO nova.compute.manager [req-68f6a5e8-0f50-428b-a15b-147a27b14c31 req-2eabc943-a938-4266-a096-d50ae952b217 service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Neutron deleted interface faddd0cb-2c06-43e5-adff-b74e725a50ba; detaching it from the instance and deleting it from the info cache [ 997.965794] env[69475]: DEBUG nova.network.neutron [req-68f6a5e8-0f50-428b-a15b-147a27b14c31 req-2eabc943-a938-4266-a096-d50ae952b217 service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.296599] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508589, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571437} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.296936] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f222cc16-7581-41ff-ae7c-0538c7b3c721/f222cc16-7581-41ff-ae7c-0538c7b3c721.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.297177] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 998.297432] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f91d5c07-f4ff-4898-8adb-32b79a632d41 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.304008] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 998.304008] env[69475]: value = "task-3508593" [ 998.304008] env[69475]: _type = "Task" [ 998.304008] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.315119] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508593, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.422025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.501s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.422286] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.393s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 998.422677] env[69475]: DEBUG nova.objects.instance [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'resources' on Instance uuid f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.443109] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508592, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.447607] env[69475]: DEBUG nova.network.neutron [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.455798] env[69475]: INFO nova.network.neutron [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 998.483404] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22de98b3-8133-4760-8f37-e15dc1b10ade {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.486434] env[69475]: INFO nova.compute.manager [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Took 33.12 seconds to build instance. 
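The "Acquiring lock", 'acquired ... waited Ns' and '"released" ... held Ns' lines above (for example the compute_resources lock, released after being held 3.501s and then acquired by another request that waited 24.393s) come from oslo.concurrency's lock helpers. The sketch below is a rough stdlib-only equivalent of that acquire/held/release bookkeeping, shown only to illustrate where the timings in the log come from; real Nova code uses oslo_concurrency.lockutils.

    import threading
    import time
    from contextlib import contextmanager

    _locks = {}                      # lock name -> threading.Lock
    _registry_guard = threading.Lock()


    @contextmanager
    def named_lock(name, owner):
        """Serialize critical sections by name, logging waited/held times."""
        with _registry_guard:
            lock = _locks.setdefault(name, threading.Lock())
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

Usage would look like: with named_lock("compute_resources", "ResourceTracker.update_usage"): ... — the long 24.393s wait above simply means another request held the same named lock for that long first.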
[ 998.495616] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38383ae0-8cc7-49b2-b200-13726a85b828 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.530786] env[69475]: DEBUG nova.compute.manager [req-68f6a5e8-0f50-428b-a15b-147a27b14c31 req-2eabc943-a938-4266-a096-d50ae952b217 service nova] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Detach interface failed, port_id=faddd0cb-2c06-43e5-adff-b74e725a50ba, reason: Instance dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 998.819020] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508593, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072915} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.819020] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.819020] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f253c52-9ff0-4b85-b48d-04f6eaace101 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.845267] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] f222cc16-7581-41ff-ae7c-0538c7b3c721/f222cc16-7581-41ff-ae7c-0538c7b3c721.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.846664] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d255bab5-4ddd-4ab7-9a18-f5b95b1c2442 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.866979] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 998.866979] env[69475]: value = "task-3508594" [ 998.866979] env[69475]: _type = "Task" [ 998.866979] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.874936] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508594, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.890196] env[69475]: DEBUG nova.compute.manager [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Stashing vm_state: stopped {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 998.942886] env[69475]: DEBUG oslo_vmware.api [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508592, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.680735} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.943948] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 998.944167] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 998.944447] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 998.944645] env[69475]: INFO nova.compute.manager [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Took 1.63 seconds to destroy the instance on the hypervisor. [ 998.944898] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 998.945359] env[69475]: DEBUG nova.compute.manager [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 998.945467] env[69475]: DEBUG nova.network.neutron [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 998.949883] env[69475]: INFO nova.compute.manager [-] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Took 1.42 seconds to deallocate network for instance. 
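The two terminations traced above (instances dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac and 211f895a-bba5-4f10-9296-0d461af49f98) follow the same destroy ordering in the vmwareapi driver: power off the VM if it is running, unregister it, delete its datastore directory, then deallocate its Neutron ports. A condensed sketch of that ordering follows; the methods on vc and network_api are hypothetical stubs standing in for the PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and deallocate_for_instance() calls shown in the log.

    def destroy_instance(vc, network_api, instance, datastore_path):
        """Tear down an instance in the order the log entries show.

        vc and network_api are hypothetical client objects; each call maps to
        one vCenter task or Neutron call from the surrounding log lines.
        """
        vc.power_off_vm(instance)                        # PowerOffVM_Task (if powered on)
        vc.unregister_vm(instance)                       # VirtualMachine.UnregisterVM
        vc.delete_datastore_file(datastore_path)         # FileManager.DeleteDatastoreFile_Task
        network_api.deallocate_for_instance(instance)    # Neutron port cleanup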
[ 998.992735] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dc427d7f-c153-4ab0-9a3f-0101b768ea75 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.636s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.364936] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc17d1b7-878b-441f-8e97-b7c445965fd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.373750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb7eb726-c54d-4481-bd86-4da163043347 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.379909] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508594, 'name': ReconfigVM_Task, 'duration_secs': 0.264319} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.380479] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Reconfigured VM instance instance-0000005a to attach disk [datastore1] f222cc16-7581-41ff-ae7c-0538c7b3c721/f222cc16-7581-41ff-ae7c-0538c7b3c721.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.381103] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37d09877-8afc-4670-b918-61eb9f704d2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.412842] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871d4937-d334-4870-9981-6eb7bde43aea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.415694] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 999.415694] env[69475]: value = "task-3508595" [ 999.415694] env[69475]: _type = "Task" [ 999.415694] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.424180] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-476d808a-d838-48e2-bd4f-a5eb753e789b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.428350] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.431564] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508595, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.441597] env[69475]: DEBUG nova.compute.provider_tree [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 999.460966] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.907666] env[69475]: DEBUG nova.compute.manager [req-1f19dda7-65d4-4568-8e1a-ca97a0182a80 req-5468e25d-b98a-4afd-8eea-a832f1856e67 service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Received event network-vif-deleted-16c03d8c-ab68-4516-b540-a94b1d4caf49 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.907951] env[69475]: INFO nova.compute.manager [req-1f19dda7-65d4-4568-8e1a-ca97a0182a80 req-5468e25d-b98a-4afd-8eea-a832f1856e67 service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Neutron deleted interface 16c03d8c-ab68-4516-b540-a94b1d4caf49; detaching it from the instance and deleting it from the info cache [ 999.908013] env[69475]: DEBUG nova.network.neutron [req-1f19dda7-65d4-4568-8e1a-ca97a0182a80 req-5468e25d-b98a-4afd-8eea-a832f1856e67 service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.927537] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508595, 'name': Rename_Task, 'duration_secs': 0.134566} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.927757] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.927999] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2c5d683-5915-4742-bd22-295b4b483755 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.934454] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 999.934454] env[69475]: value = "task-3508596" [ 999.934454] env[69475]: _type = "Task" [ 999.934454] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.942833] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.944967] env[69475]: DEBUG nova.scheduler.client.report [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.977863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.977863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.978173] env[69475]: DEBUG nova.network.neutron [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1000.202713] env[69475]: DEBUG nova.compute.manager [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service 
nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1000.202955] env[69475]: DEBUG oslo_concurrency.lockutils [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service nova] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.205319] env[69475]: DEBUG oslo_concurrency.lockutils [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.205319] env[69475]: DEBUG oslo_concurrency.lockutils [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service nova] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.205319] env[69475]: DEBUG nova.compute.manager [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] No waiting events found dispatching network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1000.205319] env[69475]: WARNING nova.compute.manager [req-b5ab0cd8-cb70-4ed1-a025-7cd47c4ef936 req-66117e43-2816-45c9-a654-c64cfa1c94a5 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received unexpected event network-vif-plugged-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 for instance with vm_state shelved_offloaded and task_state spawning. [ 1000.214690] env[69475]: DEBUG nova.network.neutron [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.412970] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73809d0a-b73d-4521-88e5-25915634aa2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.422961] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be7b2ac5-d590-43cd-9154-50bd84849c0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.446689] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508596, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.449644] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.027s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.464328] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 22.685s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.470803] env[69475]: DEBUG nova.compute.manager [req-1f19dda7-65d4-4568-8e1a-ca97a0182a80 req-5468e25d-b98a-4afd-8eea-a832f1856e67 service nova] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Detach interface failed, port_id=16c03d8c-ab68-4516-b540-a94b1d4caf49, reason: Instance 211f895a-bba5-4f10-9296-0d461af49f98 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1000.477733] env[69475]: INFO nova.scheduler.client.report [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted allocations for instance f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 [ 1000.568714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.568960] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.569184] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.569368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1000.569533] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.572018] env[69475]: INFO nova.compute.manager [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Terminating instance [ 1000.715602] env[69475]: INFO nova.compute.manager [-] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Took 1.77 seconds to deallocate network for instance. [ 1000.735638] env[69475]: DEBUG nova.network.neutron [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.949803] env[69475]: DEBUG oslo_vmware.api [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508596, 'name': PowerOnVM_Task, 'duration_secs': 0.72704} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.950441] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.950708] env[69475]: INFO nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Took 7.81 seconds to spawn the instance on the hypervisor. 
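Instance f222cc16-7581-41ff-ae7c-0538c7b3c721, reported just above as spawned in 7.81 seconds, went through the usual cached-image spawn path visible in the earlier entries: copy the base VMDK out of devstack-image-cache_base, extend the root disk, attach the copy with ReconfigVM_Task, rename the VM, then power it on. The sketch below lists those steps in order; the method names on vc are hypothetical placeholders for the corresponding vCenter tasks, not Nova's actual helper names.

    def spawn_from_cached_image(vc, instance, cache_vmdk, root_vmdk, root_size_kb):
        """Spawn flow as reflected in the log, using hypothetical vCenter helpers."""
        vc.copy_virtual_disk(cache_vmdk, root_vmdk)       # CopyVirtualDisk_Task
        vc.extend_virtual_disk(root_vmdk, root_size_kb)   # ExtendVirtualDisk_Task
        vc.reconfig_vm_attach_disk(instance, root_vmdk)   # ReconfigVM_Task
        vc.rename_vm(instance)                            # Rename_Task
        vc.power_on_vm(instance)                          # PowerOnVM_Task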
[ 1000.950896] env[69475]: DEBUG nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.951692] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3caec23-30e4-4591-bf67-a110bf26504c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.988568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3b573f15-ed60-4e52-9d2a-fcaa47516029 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.343s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1000.989604] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Acquired lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.990726] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41dd96e1-946b-4021-a1b5-534be49ddc9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.002325] env[69475]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 1001.002442] env[69475]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69475) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1001.003074] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9696d0b-310e-4942-ba7a-ded69724c331 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.011743] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54a89b97-da08-41b1-acde-d99e0dd9c270 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.050368] env[69475]: ERROR root [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-701001' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-701001' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-701001' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-701001'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-701001' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-701001' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-701001'}\n"]: nova.exception.InstanceNotFound: Instance f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 could not be found. [ 1001.050792] env[69475]: DEBUG oslo_concurrency.lockutils [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] Releasing lock "f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.050792] env[69475]: DEBUG nova.compute.manager [req-413a93b8-b50d-4f55-bdc4-259bc356b060 req-afcca21c-5083-41b4-9579-05f8f2811e6b service nova] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Detach interface failed, port_id=eb5ab964-44c1-4189-9805-f3c80abb01ca, reason: Instance f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1001.081673] env[69475]: DEBUG nova.compute.manager [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1001.081899] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1001.082831] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e9d071-9876-4700-8434-21163231a614 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.090478] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1001.090749] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-212fee9e-d3fe-4e45-9280-652fe70ffdea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.098794] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1001.098794] env[69475]: value = "task-3508597" [ 1001.098794] env[69475]: _type = "Task" [ 1001.098794] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.109386] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508597, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.225357] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1001.240786] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.272163] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d81d918b29c673b797b605400eedfde9',container_format='bare',created_at=2025-04-22T09:40:54Z,direct_url=,disk_format='vmdk',id=079770cf-a859-4f7a-ae7c-ef25478face9,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-1888587046-shelved',owner='572bc56741e24d57a4d01f202c8fb78d',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2025-04-22T09:41:10Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1001.272440] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1001.273066] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1001.273066] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1001.273066] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1001.273303] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1001.273737] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1001.273737] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1001.274125] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1001.274300] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1001.274713] env[69475]: DEBUG nova.virt.hardware [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1001.275507] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2882c090-a027-4f2f-a79e-aba376d7ec0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.286180] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dce884d-1f07-4a89-a4c8-ab7cae24eea5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.304898] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:ef:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.315115] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1001.319111] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.320133] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ebce575-c100-4174-b7d1-0d5e33aece2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.346121] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.346121] env[69475]: value = "task-3508598" [ 1001.346121] env[69475]: _type = "Task" [ 1001.346121] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.354502] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508598, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.368877] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac28fb4e-4f4f-4b9e-8c4e-24f7770f2e3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.376898] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3d960d-c2f3-4db3-9b68-8d45b6eb957e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.411433] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-507dd7b4-6310-4467-a995-0e5c219e96cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.420158] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462543f6-4ac8-4536-ba2b-7b6416d9067c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.437154] env[69475]: DEBUG nova.compute.provider_tree [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.468926] env[69475]: INFO nova.compute.manager [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Took 35.50 seconds to build instance. [ 1001.609027] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508597, 'name': PowerOffVM_Task, 'duration_secs': 0.343923} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.609991] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1001.610286] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1001.610598] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b70bbd70-a019-42bc-93ba-55865ec22f20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.675457] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1001.675650] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1001.675958] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore1] e8657a44-d786-4fa6-b39c-28fc71415ce8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1001.676269] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15fc08ed-ef3c-4a05-aa6a-0b4e79918da4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.683577] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1001.683577] env[69475]: value = "task-3508600" [ 1001.683577] env[69475]: _type = "Task" [ 1001.683577] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.691711] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508600, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.857223] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508598, 'name': CreateVM_Task, 'duration_secs': 0.349072} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.857304] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1001.858192] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.858974] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.858974] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1001.858974] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47364698-cff2-4bfa-b0d2-4373431d2c5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.864135] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1001.864135] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52441c3a-c27d-ec62-1fe0-d6515e5142a4" [ 1001.864135] env[69475]: _type = "Task" [ 1001.864135] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.875324] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52441c3a-c27d-ec62-1fe0-d6515e5142a4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.941415] env[69475]: DEBUG nova.scheduler.client.report [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.970953] env[69475]: DEBUG oslo_concurrency.lockutils [None req-abb868dd-07b0-4576-8093-32247f4a45fb tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.018s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.043752] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "4100fb43-1dae-40b1-8caa-11dd67962274" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.043752] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.194732] env[69475]: DEBUG oslo_vmware.api [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508600, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128088} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.194967] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1002.195155] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1002.195336] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1002.195692] env[69475]: INFO nova.compute.manager [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1002.195818] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1002.196095] env[69475]: DEBUG nova.compute.manager [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1002.196174] env[69475]: DEBUG nova.network.neutron [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1002.376875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1002.377164] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Processing image 079770cf-a859-4f7a-ae7c-ef25478face9 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1002.377410] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.377559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.377770] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1002.378050] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab639c81-a20c-47d7-8d6e-0582707da35c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.386778] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1002.386963] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1002.387695] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbf3fca4-23a3-4fea-bcf7-7ae94b31ac70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.395029] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1002.395029] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52990fa0-ccfe-2298-8753-8bb4bf5dac35" [ 1002.395029] env[69475]: _type = "Task" [ 1002.395029] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.411441] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1002.411441] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Fetch image to [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809/OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1002.411441] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Downloading stream optimized image 079770cf-a859-4f7a-ae7c-ef25478face9 to [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809/OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809.vmdk on the data store datastore2 as vApp {{(pid=69475) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1002.411646] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Downloading image file data 079770cf-a859-4f7a-ae7c-ef25478face9 to the ESX as VM named 'OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809' {{(pid=69475) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1002.500084] env[69475]: DEBUG nova.compute.manager [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.500084] env[69475]: DEBUG nova.compute.manager [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing instance network info cache due to event network-changed-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1002.500084] env[69475]: DEBUG oslo_concurrency.lockutils [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] Acquiring lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.500084] env[69475]: DEBUG oslo_concurrency.lockutils [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] Acquired lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.500084] env[69475]: DEBUG nova.network.neutron [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Refreshing network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1002.524854] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1002.524854] env[69475]: value = "resgroup-9" [ 1002.524854] env[69475]: _type = "ResourcePool" [ 1002.524854] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1002.525767] env[69475]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d1e605e2-8bf6-4d12-9c76-5a024dc26eeb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.549498] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1002.559525] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease: (returnval){ [ 1002.559525] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e2adc-6364-accc-d877-f63223f309c5" [ 1002.559525] env[69475]: _type = "HttpNfcLease" [ 1002.559525] env[69475]: } obtained for vApp import into resource pool (val){ [ 1002.559525] env[69475]: value = "resgroup-9" [ 1002.559525] env[69475]: _type = "ResourcePool" [ 1002.559525] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1002.611706] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the lease: (returnval){ [ 1002.611706] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e2adc-6364-accc-d877-f63223f309c5" [ 1002.611706] env[69475]: _type = "HttpNfcLease" [ 1002.611706] env[69475]: } to be ready. 
{{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1002.611706] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1002.611706] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e2adc-6364-accc-d877-f63223f309c5" [ 1002.611706] env[69475]: _type = "HttpNfcLease" [ 1002.611706] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1002.741644] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.742019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.969585] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.506s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.972934] env[69475]: DEBUG nova.network.neutron [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.974244] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.723s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.974455] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.977933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.344s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.977933] env[69475]: DEBUG nova.objects.instance [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 
tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lazy-loading 'resources' on Instance uuid 9c27dcc3-67df-46ea-947d-b2ecdaeeb003 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1003.004982] env[69475]: INFO nova.scheduler.client.report [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleted allocations for instance b1b04eb9-ded6-4425-8a06-0c26c086a09b [ 1003.008644] env[69475]: DEBUG nova.compute.manager [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1003.018483] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9489b3d-f10b-42db-b531-f2181e5cc673 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.073157] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1003.073157] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e2adc-6364-accc-d877-f63223f309c5" [ 1003.073157] env[69475]: _type = "HttpNfcLease" [ 1003.073157] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1003.073864] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1003.073864] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e2adc-6364-accc-d877-f63223f309c5" [ 1003.073864] env[69475]: _type = "HttpNfcLease" [ 1003.073864] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1003.074663] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ca2de03-8d0b-4a30-9cf4-9325376ba397 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.077988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.083669] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk from lease info. 
{{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1003.086062] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk. {{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1003.150488] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-55b0a50a-04e9-4cd8-b8ac-9570b8382b03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.216292] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.216566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.244806] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.333931] env[69475]: DEBUG nova.network.neutron [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updated VIF entry in instance network info cache for port 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1003.334332] env[69475]: DEBUG nova.network.neutron [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [{"id": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "address": "fa:16:3e:5a:ef:b3", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.238", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87df7fc0-9f", "ovs_interfaceid": "87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.486920] env[69475]: INFO nova.compute.manager [-] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Took 1.29 seconds to deallocate network for instance. 
[ 1003.530814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5c5ae0db-eb9a-4159-a0b9-d2c490b5cbdf tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "b1b04eb9-ded6-4425-8a06-0c26c086a09b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.764s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.532583] env[69475]: INFO nova.compute.manager [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] instance snapshotting [ 1003.542099] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1626d6e5-615a-4bcc-adfb-b4a3914c9eb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.576026] env[69475]: INFO nova.scheduler.client.report [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocation for migration ffcfa08f-8f32-497c-b2ed-6898dd5bbd21 [ 1003.578200] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ec318e8-9ad7-4dcc-96f1-67d5e32f8a33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.721042] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1003.772069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.836784] env[69475]: DEBUG oslo_concurrency.lockutils [req-6954df29-213e-4e36-8663-d2b913fb7565 req-8347ff86-f313-4c74-b62b-324be4005fee service nova] Releasing lock "refresh_cache-e8c2d21e-2e42-48de-928e-c5fd944899b6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.932576] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1557860-c7ba-4fe5-998e-823f5cc1c787 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.948544] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef5b5a0-e544-4bed-a399-2b15d8058913 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.989896] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850ae2ec-1832-4429-b54c-eb5fbc87f2c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.998186] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.005183] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c52f79-dbdd-4e38-b182-5dd46902ce04 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.018882] env[69475]: DEBUG nova.compute.provider_tree [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.089861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b236b54d-ab6d-4a0b-8fb8-d9e28cee620a tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 29.475s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.096875] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 
1004.097239] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-53cdc4aa-fe3f-420f-9729-f6edd25b686c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.105887] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 1004.105887] env[69475]: value = "task-3508602" [ 1004.105887] env[69475]: _type = "Task" [ 1004.105887] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.116799] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508602, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.184468] env[69475]: INFO nova.compute.manager [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Rebuilding instance [ 1004.238740] env[69475]: DEBUG nova.compute.manager [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1004.238740] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1813f104-3d52-4ba5-b36d-c057ab6abc1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.249011] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.267564] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.267832] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.268051] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.268244] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.268407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.270661] env[69475]: INFO nova.compute.manager [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Terminating instance [ 1004.274948] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1004.275176] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1004.276513] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69a4760f-2d16-423b-a747-92cc9e34d1ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.284898] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1004.285340] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1004.285619] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-57345bee-249d-4359-9abf-e1dfdb2d914c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.481611] env[69475]: DEBUG oslo_vmware.rw_handles [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525f0640-89a2-6acb-b02a-086212f1f5f4/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1004.482448] env[69475]: INFO nova.virt.vmwareapi.images [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Downloaded image file data 079770cf-a859-4f7a-ae7c-ef25478face9 [ 1004.483134] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42287ad-1ac3-4123-896f-ef88c9be20a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.498965] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9410aed7-19b5-4a87-b72b-9fd018d68e66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.520935] env[69475]: DEBUG nova.compute.manager [req-b6396511-3c00-4142-946a-9aa5b1525702 req-566c6887-e162-428b-b180-3a826d414f7f service nova] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Received event network-vif-deleted-7dcffc2d-4fd4-4b04-9913-da880fac60ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.522762] env[69475]: INFO nova.virt.vmwareapi.images [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] The imported VM was unregistered [ 1004.525856] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1004.526100] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9 {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.527142] env[69475]: DEBUG nova.scheduler.client.report [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.530243] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-986db12d-1a38-4a3d-9e8e-b45b205e8f50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.541273] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9 {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.541479] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809/OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809.vmdk to [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk. {{(pid=69475) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1004.542329] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-b11da492-bf10-4a79-be08-7568190ce3ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.549386] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1004.549386] env[69475]: value = "task-3508604" [ 1004.549386] env[69475]: _type = "Task" [ 1004.549386] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.557872] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.616538] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508602, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.784025] env[69475]: DEBUG nova.compute.manager [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1004.784025] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1004.784025] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a86d8d2-6ddb-4ebd-894d-683551f41fe3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.793060] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1004.793436] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ebb7d7d-c7c9-4340-b110-0c906c2ad720 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.802288] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 1004.802288] env[69475]: value = "task-3508605" [ 1004.802288] env[69475]: _type = "Task" [ 1004.802288] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.812454] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508605, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.910991] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.911368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.911584] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1004.911769] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.911937] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.914268] env[69475]: INFO nova.compute.manager [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Terminating instance [ 1005.034020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.036660] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.845s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.036989] env[69475]: DEBUG nova.objects.instance [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lazy-loading 'resources' on Instance uuid c9b2f701-a73a-4561-b637-62e3ce98a44f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.056196] env[69475]: INFO nova.scheduler.client.report [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted allocations for instance 9c27dcc3-67df-46ea-947d-b2ecdaeeb003 [ 1005.067915] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.119439] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508602, 'name': CreateSnapshot_Task, 'duration_secs': 0.769191} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.119713] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1005.120635] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f022e99e-2424-4814-9884-5555dae4549f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.251258] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.251631] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a60c21a2-fe31-42a4-8cf5-5702dad483da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.266894] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1005.266894] env[69475]: value = "task-3508606" [ 1005.266894] env[69475]: _type = "Task" [ 1005.266894] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.277991] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508606, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.314711] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508605, 'name': PowerOffVM_Task, 'duration_secs': 0.370114} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.314984] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.315183] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.315443] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dd518a33-41a2-481f-8f52-58f2fc64eaed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.369400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.369604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.396428] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.396645] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.396831] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleting the datastore file [datastore2] 4b3b53d1-82bf-40e7-9988-af7b51e9883a {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.397113] env[69475]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c171dc31-da1d-4ce2-8ac2-6f903946a90b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.406841] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for the task: (returnval){ [ 1005.406841] env[69475]: value = "task-3508608" [ 1005.406841] env[69475]: _type = "Task" [ 1005.406841] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.418744] env[69475]: DEBUG nova.compute.manager [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1005.418968] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.419386] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508608, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.420159] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02a810dc-e0d8-4c66-b021-00682bb66807 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.430522] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.430903] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64485a7f-a2c0-4b00-8a14-e97845b3a7c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.439138] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1005.439138] env[69475]: value = "task-3508609" [ 1005.439138] env[69475]: _type = "Task" [ 1005.439138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.452783] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508609, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.564401] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 43%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.569131] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c0f7c22b-2b04-4b70-ba75-b468d9aeda16 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "9c27dcc3-67df-46ea-947d-b2ecdaeeb003" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.592s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1005.642285] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1005.646028] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-12759206-99b5-4819-a35d-e537978598e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.657994] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 1005.657994] env[69475]: value = "task-3508610" [ 1005.657994] env[69475]: _type = "Task" [ 1005.657994] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.671980] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.782027] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508606, 'name': PowerOffVM_Task, 'duration_secs': 0.316485} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.782396] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.782648] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1005.783533] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ba2bd6-ff9b-4565-acf4-c81bd3a4e717 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.796254] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.796573] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d397fcd-e577-4e29-9709-f7b25dea2ec3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.872800] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1005.877107] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1005.877337] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1005.877543] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleting the datastore file [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1005.878032] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-093598ba-f267-48dd-8a58-5158746182b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.891048] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1005.891048] env[69475]: value = "task-3508612" [ 1005.891048] env[69475]: _type = "Task" [ 1005.891048] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.896750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d4960f7-9307-4e33-a062-aba6e2e31bd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.905387] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508612, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.913906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfa8847-406e-4674-bcd8-c79763e87187 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.923408] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508608, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.954468] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3638b1f0-f073-4a3b-b513-1018eacd8896 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.965592] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec9661e2-153c-4261-b60c-1f106487b450 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.969723] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508609, 'name': PowerOffVM_Task, 'duration_secs': 0.2257} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.970427] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.970611] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1005.971331] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2f2955ae-2566-45da-b7b3-6b3c926c1170 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.982172] env[69475]: DEBUG nova.compute.provider_tree [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.997327] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.997591] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.997845] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 
tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1005.998094] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1005.998302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.000673] env[69475]: INFO nova.compute.manager [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Terminating instance [ 1006.062728] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 63%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.064417] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1006.064645] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1006.064833] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore1] b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1006.065127] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-478079d7-e011-4afe-893e-ff0e8b51be53 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.072165] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1006.072165] env[69475]: value = "task-3508614" [ 1006.072165] env[69475]: _type = "Task" [ 1006.072165] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.080708] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508614, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.168135] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.395812] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.403464] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508612, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242096} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.403762] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.403936] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.404121] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.418827] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508608, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.486316] env[69475]: DEBUG nova.scheduler.client.report [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.505188] env[69475]: DEBUG nova.compute.manager [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1006.505428] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1006.506578] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132debef-ca85-4605-9500-6937d90a85e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.515102] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.515390] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0572ccea-1655-47ff-b876-f88ebdfc2149 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.521706] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 1006.521706] env[69475]: value = "task-3508615" [ 1006.521706] env[69475]: _type = "Task" [ 1006.521706] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.530245] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508615, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.561184] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.583462] env[69475]: DEBUG oslo_vmware.api [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508614, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251537} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.583764] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1006.583957] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1006.584163] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1006.584343] env[69475]: INFO nova.compute.manager [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1006.584597] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1006.584831] env[69475]: DEBUG nova.compute.manager [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1006.584898] env[69475]: DEBUG nova.network.neutron [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1006.670076] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.925032] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508608, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.992193] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.955s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.995028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.480s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.996842] env[69475]: INFO nova.compute.claims [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1007.002014] env[69475]: DEBUG nova.compute.manager [req-14db8b92-f6e4-4f32-9503-ed02992bd604 req-547c8b1a-b05b-4fa8-ab68-d850d40e8bb6 service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Received event network-vif-deleted-afb4cf7c-0e25-4b9a-8f0d-90f08fecda68 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1007.002270] env[69475]: INFO nova.compute.manager [req-14db8b92-f6e4-4f32-9503-ed02992bd604 req-547c8b1a-b05b-4fa8-ab68-d850d40e8bb6 service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Neutron deleted interface afb4cf7c-0e25-4b9a-8f0d-90f08fecda68; detaching it from the instance and deleting it from the info cache [ 1007.002493] env[69475]: DEBUG nova.network.neutron [req-14db8b92-f6e4-4f32-9503-ed02992bd604 req-547c8b1a-b05b-4fa8-ab68-d850d40e8bb6 service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.023086] env[69475]: INFO nova.scheduler.client.report [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Deleted allocations for instance c9b2f701-a73a-4561-b637-62e3ce98a44f [ 1007.036275] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508615, 'name': PowerOffVM_Task, 'duration_secs': 0.250476} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.036821] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.036999] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.037609] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8c5b88f-e841-470c-82ee-cb86d0dd5614 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.063735] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.113742] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.113956] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.114164] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleting the datastore file [datastore1] f40aa0bb-af1d-4f8f-a906-f1c83307b465 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.114451] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f13a428c-9b6b-4763-9c00-ad98ae78d8a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.120833] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for the task: (returnval){ [ 1007.120833] env[69475]: value = "task-3508617" [ 1007.120833] env[69475]: _type = "Task" [ 1007.120833] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.130170] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508617, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.169932] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.424587] env[69475]: DEBUG oslo_vmware.api [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Task: {'id': task-3508608, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.673148} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.424843] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.425036] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1007.425214] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1007.425381] env[69475]: INFO nova.compute.manager [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Took 2.64 seconds to destroy the instance on the hypervisor. [ 1007.425662] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.426126] env[69475]: DEBUG nova.compute.manager [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1007.426126] env[69475]: DEBUG nova.network.neutron [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1007.439225] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1007.440244] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1007.440244] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1007.440244] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1007.440244] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1007.440244] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1007.440415] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1007.440546] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1007.440715] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1007.440875] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1007.441055] env[69475]: DEBUG nova.virt.hardware [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1007.441939] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880463df-8d88-488e-8fde-9f040115a04f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.444765] env[69475]: DEBUG nova.network.neutron [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.451139] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88adac25-105b-461e-8427-0076624b076f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.466809] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:10:a6:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de52f276-c28b-45f5-8248-9019b9765828', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.475835] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.475835] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1007.476118] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f147566d-5c43-4f99-a89e-20484c547f19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.498647] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.498647] env[69475]: value = "task-3508618" [ 1007.498647] env[69475]: _type = "Task" [ 1007.498647] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.505695] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13b326da-d7b3-4af0-8930-387371287c33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.511863] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.519195] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d655c6cd-abdf-4ccb-abb3-28f4869fb61f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.534071] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e917ce1f-2510-404f-a97b-c77c4cef38cb tempest-ServersTestFqdnHostnames-286418074 tempest-ServersTestFqdnHostnames-286418074-project-member] Lock "c9b2f701-a73a-4561-b637-62e3ce98a44f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.279s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.554977] env[69475]: DEBUG nova.compute.manager [req-14db8b92-f6e4-4f32-9503-ed02992bd604 req-547c8b1a-b05b-4fa8-ab68-d850d40e8bb6 service nova] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Detach interface failed, port_id=afb4cf7c-0e25-4b9a-8f0d-90f08fecda68, reason: Instance b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1007.565573] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508604, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.575747} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.565838] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809/OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809.vmdk to [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk. 
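The "Task: {...} progress is N%" and "Waiting for the task: (returnval){...} to complete" entries above come from a poll-until-complete loop around long-running vCenter tasks (PowerOffVM_Task, MoveVirtualDisk_Task, CloneVM_Task, ...). As a rough, self-contained illustration of that pattern only — not oslo.vmware's actual implementation; FakeTask, wait_for_task and POLL_INTERVAL below are made-up names — a caller polls a task handle on a fixed interval and stops once it reports success:

# Hypothetical sketch of the polling pattern visible in the log; not oslo.vmware code.
import itertools
import time

POLL_INTERVAL = 0.5  # the real poll interval is configurable; this value is arbitrary


class FakeTask:
    """Stand-in for a vCenter task handle; advances 25% per poll."""

    def __init__(self, task_id, name):
        self.task_id = task_id
        self.name = name
        self._progress = itertools.count(0, 25)

    def info(self):
        progress = min(next(self._progress), 100)
        state = "success" if progress == 100 else "running"
        return {"id": self.task_id, "name": self.name,
                "progress": progress, "state": state}


def wait_for_task(task, poll_interval=POLL_INTERVAL):
    """Poll `task` until it completes, emitting progress lines like those above."""
    while True:
        info = task.info()
        print(f"Task: {{'id': {info['id']!r}, 'name': {info['name']!r}}} "
              f"progress is {info['progress']}%.")
        if info["state"] == "success":
            print(f"Task: {info['id']!r} completed successfully.")
            return info
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTask("task-0000001", "CopyVirtualDisk_Task"))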
[ 1007.566034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Cleaning up location [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1007.566203] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_e136b5ff-62c6-46e0-bdef-f704e9c38809 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.566713] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c931f5c-1af4-4452-bbd7-3445a819ad67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.573264] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1007.573264] env[69475]: value = "task-3508619" [ 1007.573264] env[69475]: _type = "Task" [ 1007.573264] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.585741] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.632048] env[69475]: DEBUG oslo_vmware.api [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Task: {'id': task-3508617, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.479557} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.632470] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1007.632606] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1007.632836] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1007.633125] env[69475]: INFO nova.compute.manager [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1007.633338] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1007.633561] env[69475]: DEBUG nova.compute.manager [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1007.633657] env[69475]: DEBUG nova.network.neutron [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1007.673904] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.931284] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1007.931951] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.947215] env[69475]: INFO nova.compute.manager [-] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Took 1.36 seconds to deallocate network for instance. [ 1008.011330] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.085015] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.062723} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.085291] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.085439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1008.085695] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk to [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1008.085949] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94c17070-8f29-419a-bddb-d8440da41014 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.092718] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1008.092718] env[69475]: value = "task-3508620" [ 1008.092718] env[69475]: _type = "Task" [ 1008.092718] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.102405] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.172278] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.188053] env[69475]: DEBUG nova.network.neutron [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.320964] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad2c7289-5396-4bd2-8922-753e8572cfe0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.330469] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58439344-aba0-43fe-b89a-745e953cc359 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.362735] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b20d6f-9dd1-4f16-a759-8b6b2ff187a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.371416] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6262f358-0d83-4e40-a0fa-d8d5f1884f2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.388799] env[69475]: DEBUG nova.compute.provider_tree [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.434992] env[69475]: DEBUG nova.compute.utils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1008.455515] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.476161] env[69475]: DEBUG 
nova.compute.manager [req-2a8e4538-80b5-470b-800c-ada6e3a426ba req-1acc06e5-df41-4ee8-ba67-c00140afb3ae service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Received event network-vif-deleted-277b3f9d-a1c5-4f1b-be8a-4818987fd78e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.476161] env[69475]: INFO nova.compute.manager [req-2a8e4538-80b5-470b-800c-ada6e3a426ba req-1acc06e5-df41-4ee8-ba67-c00140afb3ae service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Neutron deleted interface 277b3f9d-a1c5-4f1b-be8a-4818987fd78e; detaching it from the instance and deleting it from the info cache [ 1008.476341] env[69475]: DEBUG nova.network.neutron [req-2a8e4538-80b5-470b-800c-ada6e3a426ba req-1acc06e5-df41-4ee8-ba67-c00140afb3ae service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.510441] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.560786] env[69475]: DEBUG nova.network.neutron [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.611361] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.675701] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.691534] env[69475]: INFO nova.compute.manager [-] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Took 1.27 seconds to deallocate network for instance. 
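The lockutils entries throughout this span ('Lock "compute_resources" acquired ... waited 20.480s', 'released ... held 1.007s') record, per caller, how long it queued for a named in-process lock and how long it held it. A minimal sketch of that accounting, assuming a plain threading.Lock per name rather than oslo.concurrency's real machinery (timed_lock and _get_lock are hypothetical helpers):

# Hypothetical sketch of the waited/held accounting seen in the lockutils lines; not oslo.concurrency code.
import threading
import time
from contextlib import contextmanager

_locks = {}
_locks_guard = threading.Lock()


def _get_lock(name):
    # One lock object per lock name, created on first use.
    with _locks_guard:
        return _locks.setdefault(name, threading.Lock())


@contextmanager
def timed_lock(name, caller):
    lock = _get_lock(name)
    t0 = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{caller}" :: waited {waited:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - t0 - waited
        lock.release()
        print(f'Lock "{name}" "released" by "{caller}" :: held {held:.3f}s')


if __name__ == "__main__":
    with timed_lock("compute_resources", "ResourceTracker.update_usage"):
        time.sleep(0.1)  # simulate work done while holding the lock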
[ 1008.892804] env[69475]: DEBUG nova.scheduler.client.report [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.939050] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.979548] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4577a902-97ba-46f7-a352-98707499afc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.992160] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db5e02c5-3bc4-4ac5-ad60-231b8fa07f41 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.014233] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.029710] env[69475]: DEBUG nova.compute.manager [req-2a8e4538-80b5-470b-800c-ada6e3a426ba req-1acc06e5-df41-4ee8-ba67-c00140afb3ae service nova] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Detach interface failed, port_id=277b3f9d-a1c5-4f1b-be8a-4818987fd78e, reason: Instance f40aa0bb-af1d-4f8f-a906-f1c83307b465 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1009.063489] env[69475]: INFO nova.compute.manager [-] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Took 1.43 seconds to deallocate network for instance. [ 1009.107097] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.176027] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.199933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.370178] env[69475]: DEBUG nova.compute.manager [req-fd6da871-faa0-4c23-8268-84bbfd3a8d37 req-9f84ae2a-a626-498c-af40-a0acf20f73bc service nova] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Received event network-vif-deleted-fd636137-6583-4c7a-937a-701561e4141a {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1009.399586] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.399586] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1009.401889] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 20.658s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.520752] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.571471] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1009.609160] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.675879] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task} progress is 95%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.907046] env[69475]: DEBUG nova.compute.utils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1009.915681] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1009.915875] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1009.963094] env[69475]: DEBUG nova.policy [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1010.013926] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.015117] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.015117] env[69475]: INFO nova.compute.manager [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Attaching volume 32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288 to /dev/sdb [ 1010.020069] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508618, 'name': CreateVM_Task, 'duration_secs': 2.274301} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.022530] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.023413] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.023588] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.023943] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1010.024173] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce9c452f-2c7e-4f77-a792-6a87c7ac78f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.032382] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1010.032382] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fb04eb-0bed-c5a4-2738-6fea42e96adc" [ 1010.032382] env[69475]: _type = "Task" [ 1010.032382] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.044500] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fb04eb-0bed-c5a4-2738-6fea42e96adc, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.063178] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60981ed0-9e7c-40bf-8cd5-50aaf7a7e87f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.071484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357e7066-59f9-410b-aaf6-eb44fcae111c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.087712] env[69475]: DEBUG nova.virt.block_device [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating existing volume attachment record: 9f81f2a3-5fc0-4797-9964-9e4712bd4fc6 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1010.109910] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task} progress is 80%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.176255] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508610, 'name': CloneVM_Task, 'duration_secs': 4.123934} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.176531] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Created linked-clone VM from snapshot [ 1010.177310] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10bd5fb-7987-4310-ba29-0d6146787dfe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.186719] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Uploading image a49fab2a-20ff-4ff0-b10d-1fa57ab8071b {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1010.201270] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1010.201608] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-98a00892-18f8-4224-8fab-24c282b15897 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.210804] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: 
(returnval){ [ 1010.210804] env[69475]: value = "task-3508621" [ 1010.210804] env[69475]: _type = "Task" [ 1010.210804] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.222723] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508621, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.417144] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1010.429168] env[69475]: INFO nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating resource usage from migration d7afea0c-7a3e-479f-89f1-6da0ed8ba26e [ 1010.432322] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Successfully created port: f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1010.453063] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance baf27027-678d-4167-bb9b-df410aeb0e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4b3b53d1-82bf-40e7-9988-af7b51e9883a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f40aa0bb-af1d-4f8f-a906-f1c83307b465 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8f18d683-7734-4798-8963-7336fe229f16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4066a18f-acc5-49b5-941c-0711f29bdcd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance ff09407e-93ea-4919-ba5f-b7ee6dd018a4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 41ddf915-343b-46e4-834e-11ab3899242f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e8c2d21e-2e42-48de-928e-c5fd944899b6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 211f895a-bba5-4f10-9296-0d461af49f98 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1010.457028] env[69475]: WARNING nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e8657a44-d786-4fa6-b39c-28fc71415ce8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
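The inventory reported earlier for provider dd221100-68c1-4a75-92b5-b24d81fee5da and the per-instance allocations listed in this resource-tracker pass fit together as simple arithmetic: usable capacity per resource class is roughly (total - reserved) * allocation_ratio, minus whatever the current allocations consume. A small sketch of that bookkeeping using the values from the log (not Placement's or the resource tracker's actual code; remaining_capacity is a hypothetical helper):

# Hypothetical capacity bookkeeping over the inventory and allocation shapes logged above.
INVENTORY = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89,
                'step_size': 1, 'allocation_ratio': 1.0},
}

# Per-instance allocations in the same shape the resource tracker logs.
ALLOCATIONS = [
    {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}},
    {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}},
]


def remaining_capacity(inventory, allocations):
    """Return {resource_class: units still available} under the given inventory."""
    used = {}
    for alloc in allocations:
        for rc, amount in alloc['resources'].items():
            used[rc] = used.get(rc, 0) + amount
    remaining = {}
    for rc, inv in inventory.items():
        usable = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        remaining[rc] = usable - used.get(rc, 0)
    return remaining


if __name__ == "__main__":
    print(remaining_capacity(INVENTORY, ALLOCATIONS))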
[ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f222cc16-7581-41ff-ae7c-0538c7b3c721 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.457028] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8963b50c-29ca-49fd-8289-1e1b7583ca25 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1010.547055] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fb04eb-0bed-c5a4-2738-6fea42e96adc, 'name': SearchDatastore_Task, 'duration_secs': 0.012548} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.547570] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.547676] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.547961] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.548167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.548405] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.548703] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87a6ccdf-459e-4f47-b1b2-917305444fcb {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.556417] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.556560] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.557328] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dd52931-d521-4b78-9aaf-8d1b3334a15d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.562576] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1010.562576] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f1ab33-eb15-7b48-8626-afecd66933d8" [ 1010.562576] env[69475]: _type = "Task" [ 1010.562576] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.571114] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f1ab33-eb15-7b48-8626-afecd66933d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.608090] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508620, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.472167} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.608385] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/079770cf-a859-4f7a-ae7c-ef25478face9/079770cf-a859-4f7a-ae7c-ef25478face9.vmdk to [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1010.609313] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea189cf0-adfa-42d4-818b-cf803ec427ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.632699] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1010.633067] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3c14d4b6-5917-4593-b72f-58b48d084d86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.654903] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1010.654903] env[69475]: value = "task-3508623" [ 1010.654903] env[69475]: _type = "Task" [ 1010.654903] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.662679] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508623, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.722216] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508621, 'name': Destroy_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.961935] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 44bcaa36-ecd9-448b-b589-7c32066ede1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1010.962128] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Migration d7afea0c-7a3e-479f-89f1-6da0ed8ba26e is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1010.962256] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 82236043-3222-4134-8717-4c239ed12aba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1011.073587] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f1ab33-eb15-7b48-8626-afecd66933d8, 'name': SearchDatastore_Task, 'duration_secs': 0.013391} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.074459] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6dc7cdc5-e5b3-4fca-904c-9cf122668938 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.079415] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1011.079415] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae7c7c-9e7d-8814-bfbf-4df5607b56dc" [ 1011.079415] env[69475]: _type = "Task" [ 1011.079415] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.087060] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae7c7c-9e7d-8814-bfbf-4df5607b56dc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.165863] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508623, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.220982] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508621, 'name': Destroy_Task, 'duration_secs': 0.74852} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.221345] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Destroyed the VM [ 1011.221619] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1011.221919] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-c06792a9-8b7b-452a-80a2-22ba81df6ac2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.227905] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 1011.227905] env[69475]: value = "task-3508626" [ 1011.227905] env[69475]: _type = "Task" [ 1011.227905] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.235848] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508626, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.437725] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1011.465693] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4100fb43-1dae-40b1-8caa-11dd67962274 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1011.471031] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1011.471031] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1011.471031] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1011.471410] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1011.471682] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1011.471940] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1011.472311] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1011.472585] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1011.472879] env[69475]: DEBUG 
nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1011.473182] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1011.474437] env[69475]: DEBUG nova.virt.hardware [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1011.474782] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6293f4-120f-4d57-9353-5b6fac26d2cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.483621] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6355f8-c80f-4bb3-96d9-f9de159d452f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.589444] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ae7c7c-9e7d-8814-bfbf-4df5607b56dc, 'name': SearchDatastore_Task, 'duration_secs': 0.009899} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.589710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.590390] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1011.590390] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-301526f4-db53-4bc3-ad5f-e7833bb0f690 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.596569] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1011.596569] env[69475]: value = "task-3508627" [ 1011.596569] env[69475]: _type = "Task" [ 1011.596569] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.604147] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.664856] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508623, 'name': ReconfigVM_Task, 'duration_secs': 0.544599} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.665140] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfigured VM instance instance-00000041 to attach disk [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6/e8c2d21e-2e42-48de-928e-c5fd944899b6.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1011.666538] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'encryption_secret_uuid': None, 'size': 0, 'encryption_options': None, 'boot_index': 0, 'encrypted': False, 'device_name': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'encryption_format': None, 'image_id': 'afa9d32c-9f39-44fb-bf3b-50d35842a59f'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701066', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'name': 'volume-d875e52a-1617-4b13-83ce-60084abbe663', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'e8c2d21e-2e42-48de-928e-c5fd944899b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'serial': 'd875e52a-1617-4b13-83ce-60084abbe663'}, 'device_type': None, 'attachment_id': '90422dbc-19da-4bc1-82e3-95541b090f2a', 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'boot_index': None, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69475) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1011.666739] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1011.666925] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701066', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'name': 'volume-d875e52a-1617-4b13-83ce-60084abbe663', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'e8c2d21e-2e42-48de-928e-c5fd944899b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'serial': 'd875e52a-1617-4b13-83ce-60084abbe663'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1011.667669] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee044b11-9f48-48d1-b74c-1082fea0b972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.684975] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1855307c-5d57-4747-911f-0db03e81bce2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.709689] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] volume-d875e52a-1617-4b13-83ce-60084abbe663/volume-d875e52a-1617-4b13-83ce-60084abbe663.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1011.709899] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b6e481e-231a-4a39-ae5a-797b069c58e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.727972] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1011.727972] env[69475]: value = "task-3508628" [ 1011.727972] env[69475]: _type = "Task" [ 1011.727972] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.738509] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508628, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.741746] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508626, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.978924] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f8a82046-4589-45d2-a7a3-466fe4d8f9c6 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1012.107456] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.486354} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.107722] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1012.107958] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1012.108227] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-63a4d8d5-e41c-44ea-8ade-465bb2b2aa12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.114292] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1012.114292] env[69475]: value = "task-3508629" [ 1012.114292] env[69475]: _type = "Task" [ 1012.114292] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.122475] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.246396] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508628, 'name': ReconfigVM_Task, 'duration_secs': 0.33469} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.246671] env[69475]: DEBUG oslo_vmware.api [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508626, 'name': RemoveSnapshot_Task, 'duration_secs': 0.692121} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.246889] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfigured VM instance instance-00000041 to attach disk [datastore2] volume-d875e52a-1617-4b13-83ce-60084abbe663/volume-d875e52a-1617-4b13-83ce-60084abbe663.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.253299] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1012.255766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-948ef8ab-8f61-4bfa-a923-55c9f83594d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.272639] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1012.272639] env[69475]: value = "task-3508630" [ 1012.272639] env[69475]: _type = "Task" [ 1012.272639] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.281018] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508630, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.332627] env[69475]: DEBUG nova.compute.manager [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Received event network-vif-plugged-f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1012.333278] env[69475]: DEBUG oslo_concurrency.lockutils [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] Acquiring lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1012.333278] env[69475]: DEBUG oslo_concurrency.lockutils [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1012.333278] env[69475]: DEBUG oslo_concurrency.lockutils [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.333462] env[69475]: DEBUG nova.compute.manager [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] No waiting events found dispatching network-vif-plugged-f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1012.333716] env[69475]: WARNING nova.compute.manager [req-68ae9968-2bef-4626-9ebf-87cfa0202b03 req-c461017a-a479-401f-aa18-3e517899d136 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Received unexpected event network-vif-plugged-f3242024-4636-460a-a57d-a89bdca2e37c for instance with vm_state building and task_state spawning. [ 1012.420588] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Successfully updated port: f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1012.483443] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1012.626690] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061494} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.626946] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.627789] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc926426-bfad-48f7-9873-88bee0a590f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.650165] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.650734] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1c53a87d-99d1-455a-9acf-2f1d7bf91a28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.670311] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1012.670311] env[69475]: value = "task-3508631" [ 1012.670311] env[69475]: _type = "Task" [ 1012.670311] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.678146] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.768504] env[69475]: WARNING nova.compute.manager [None req-7101ad1b-6495-424c-a7b2-f918c6d6edcf tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Image not found during snapshot: nova.exception.ImageNotFound: Image a49fab2a-20ff-4ff0-b10d-1fa57ab8071b could not be found. [ 1012.781036] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508630, 'name': ReconfigVM_Task, 'duration_secs': 0.144847} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.781851] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701066', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'name': 'volume-d875e52a-1617-4b13-83ce-60084abbe663', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'e8c2d21e-2e42-48de-928e-c5fd944899b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'serial': 'd875e52a-1617-4b13-83ce-60084abbe663'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1012.782396] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-437af0cd-d9df-4970-a921-1617f3fec6a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.788282] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1012.788282] env[69475]: value = "task-3508633" [ 1012.788282] env[69475]: _type = "Task" [ 1012.788282] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.795941] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508633, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.923962] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.924178] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.924410] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1012.986258] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 24ef554b-30bf-4e28-856e-98eb7ec2618b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1012.986382] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1012.986498] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1013.098118] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.098486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.098559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.098737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.098901] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.100886] env[69475]: INFO nova.compute.manager [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Terminating instance [ 1013.181007] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: 
{'id': task-3508631, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.239114] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edc0572-c78a-4978-9769-845f68523ebe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.247408] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1ee8f1-f6c3-41c0-9279-9efa744a120c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.279635] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cf79506-c731-44f8-8e78-3cb3bef2897b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.287134] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaeaca9-c64a-4bdc-88a4-1915549033c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.298530] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508633, 'name': Rename_Task, 'duration_secs': 0.145448} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.305818] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.306271] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1013.307577] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c7da6875-7740-4678-ab11-1962c79adf2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.313410] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1013.313410] env[69475]: value = "task-3508634" [ 1013.313410] env[69475]: _type = "Task" [ 1013.313410] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.321679] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508634, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.455367] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1013.601811] env[69475]: DEBUG nova.network.neutron [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Updating instance_info_cache with network_info: [{"id": "f3242024-4636-460a-a57d-a89bdca2e37c", "address": "fa:16:3e:32:72:73", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3242024-46", "ovs_interfaceid": "f3242024-4636-460a-a57d-a89bdca2e37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.607494] env[69475]: DEBUG nova.compute.manager [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1013.607583] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1013.608526] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb3aad16-89e4-45bb-a1b2-46f2f4498be0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.616729] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1013.616994] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b286e4c-96c1-418c-ad9f-2e9dfbf0174d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.623049] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 1013.623049] env[69475]: value = "task-3508635" [ 1013.623049] env[69475]: _type = "Task" [ 1013.623049] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.631109] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508635, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.681353] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508631, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.810753] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1013.822792] env[69475]: DEBUG oslo_vmware.api [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508634, 'name': PowerOnVM_Task, 'duration_secs': 0.464278} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.823024] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.922025] env[69475]: DEBUG nova.compute.manager [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.922140] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b8f0ee6-cc08-449d-8983-419f9719bcb2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.104704] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.105065] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Instance network_info: |[{"id": "f3242024-4636-460a-a57d-a89bdca2e37c", "address": "fa:16:3e:32:72:73", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3242024-46", "ovs_interfaceid": "f3242024-4636-460a-a57d-a89bdca2e37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1014.105836] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:72:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3242024-4636-460a-a57d-a89bdca2e37c', 
'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1014.113578] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.113798] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1014.114645] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5bcc9b8e-8b4f-4623-b29e-28b151f61a41 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.138557] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508635, 'name': PowerOffVM_Task, 'duration_secs': 0.19269} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.139804] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.139986] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.140220] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1014.140220] env[69475]: value = "task-3508636" [ 1014.140220] env[69475]: _type = "Task" [ 1014.140220] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.140398] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97f3b05b-9959-4e94-80a2-f0f977307f3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.149152] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508636, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.181616] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508631, 'name': ReconfigVM_Task, 'duration_secs': 1.016337} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.181892] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4/ff09407e-93ea-4919-ba5f-b7ee6dd018a4.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.182535] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e6a9248-ce65-4530-a500-02e5e2aec96d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.188389] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1014.188389] env[69475]: value = "task-3508638" [ 1014.188389] env[69475]: _type = "Task" [ 1014.188389] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.196351] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508638, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.209974] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1014.210238] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1014.210400] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleting the datastore file [datastore1] f222cc16-7581-41ff-ae7c-0538c7b3c721 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1014.210672] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d8be407a-011a-4c79-8b8d-7bd1f367854e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.216824] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for the task: (returnval){ [ 1014.216824] env[69475]: value = "task-3508639" [ 1014.216824] env[69475]: _type = "Task" [ 1014.216824] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.225342] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508639, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.319060] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1014.319418] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.918s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.319670] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.349s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1014.321311] env[69475]: INFO nova.compute.claims [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1014.401440] env[69475]: DEBUG nova.compute.manager [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Received event network-changed-f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1014.401712] env[69475]: DEBUG nova.compute.manager [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Refreshing instance network info cache due to event network-changed-f3242024-4636-460a-a57d-a89bdca2e37c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1014.402130] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] Acquiring lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.403126] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] Acquired lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.403126] env[69475]: DEBUG nova.network.neutron [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Refreshing network info cache for port f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1014.442456] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c64abf3d-95f9-498d-8edf-f3b3f3ae8989 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 53.590s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1014.651127] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508636, 'name': CreateVM_Task, 'duration_secs': 0.412661} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.651301] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1014.651985] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.652166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.652482] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1014.652732] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a002e4b-2f55-43a0-a034-0d1cde2f1390 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1014.657391] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1014.657391] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b9e16-9c3b-5316-9203-65fd821e24b5" [ 1014.657391] env[69475]: _type = "Task" [ 1014.657391] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.665166] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b9e16-9c3b-5316-9203-65fd821e24b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.697270] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508638, 'name': Rename_Task, 'duration_secs': 0.148297} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.697539] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1014.697767] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3c5e783-27ae-4b15-b254-14f8e0a31cd0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.705814] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1014.705814] env[69475]: value = "task-3508640" [ 1014.705814] env[69475]: _type = "Task" [ 1014.705814] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.719914] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508640, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.728215] env[69475]: DEBUG oslo_vmware.api [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Task: {'id': task-3508639, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140014} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.728520] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1014.728782] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1014.729067] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1014.729277] env[69475]: INFO nova.compute.manager [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1014.729524] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1014.729711] env[69475]: DEBUG nova.compute.manager [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1014.729816] env[69475]: DEBUG nova.network.neutron [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1015.116686] env[69475]: DEBUG nova.network.neutron [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Updated VIF entry in instance network info cache for port f3242024-4636-460a-a57d-a89bdca2e37c. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1015.117053] env[69475]: DEBUG nova.network.neutron [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Updating instance_info_cache with network_info: [{"id": "f3242024-4636-460a-a57d-a89bdca2e37c", "address": "fa:16:3e:32:72:73", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3242024-46", "ovs_interfaceid": "f3242024-4636-460a-a57d-a89bdca2e37c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.140796] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1015.141107] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701080', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'name': 'volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '41ddf915-343b-46e4-834e-11ab3899242f', 'attached_at': '', 'detached_at': '', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'serial': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1015.142000] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-548eb04c-56aa-405a-bd46-2cf26f3945ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.163039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3677ad6a-9f10-4d98-af08-9c5b8ed25775 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.171114] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520b9e16-9c3b-5316-9203-65fd821e24b5, 'name': SearchDatastore_Task, 'duration_secs': 0.00927} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.183739] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.184368] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1015.184368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.184536] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.184652] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1015.191985] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288/volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1015.192238] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71d99965-0a1c-4261-b9ec-35e41a1e00cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.193908] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2d5a188-4d4f-459d-93c0-b36daa6c01d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.212882] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1015.212882] env[69475]: value = "task-3508641" [ 1015.212882] env[69475]: _type = "Task" [ 1015.212882] env[69475]: 
} to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.216337] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508640, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.216565] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1015.216729] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1015.219778] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec5f311a-b720-4d6a-8318-bbde0b8ace46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.226881] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508641, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.227979] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1015.227979] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52de7996-8f50-26c7-4400-5bf0ffa207db" [ 1015.227979] env[69475]: _type = "Task" [ 1015.227979] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.235862] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52de7996-8f50-26c7-4400-5bf0ffa207db, 'name': SearchDatastore_Task, 'duration_secs': 0.008582} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.236576] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1ac5740-361c-4399-a8ba-20181d911774 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.240832] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1015.240832] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5231ab2b-02e4-8d9c-dda6-d8abecbb2162" [ 1015.240832] env[69475]: _type = "Task" [ 1015.240832] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.250935] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5231ab2b-02e4-8d9c-dda6-d8abecbb2162, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.599657] env[69475]: DEBUG nova.network.neutron [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.602886] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1b78b5-5be9-46a1-981b-14169a724df0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.615811] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35da32de-9b1b-41dd-a2de-667a581fcff5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.620876] env[69475]: DEBUG oslo_concurrency.lockutils [req-bcadb8a7-fe5b-4759-aaa4-4bc411d98ff2 req-064b47f3-7fb1-4001-b7bf-bfba6ef0e097 service nova] Releasing lock "refresh_cache-8963b50c-29ca-49fd-8289-1e1b7583ca25" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.664785] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5c1de6-d536-4676-a220-7dbca6d1143d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.675372] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1383a534-5355-4a75-8308-71a1ac2ad134 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.695665] env[69475]: DEBUG nova.compute.provider_tree [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1015.721887] env[69475]: DEBUG oslo_vmware.api [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508640, 'name': PowerOnVM_Task, 'duration_secs': 0.763099} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.722118] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1015.722326] env[69475]: DEBUG nova.compute.manager [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1015.723474] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e179d41-aa6f-4359-99fb-6aa7bd97a038 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.729186] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508641, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.750338] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5231ab2b-02e4-8d9c-dda6-d8abecbb2162, 'name': SearchDatastore_Task, 'duration_secs': 0.008404} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.750699] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.750960] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 8963b50c-29ca-49fd-8289-1e1b7583ca25/8963b50c-29ca-49fd-8289-1e1b7583ca25.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1015.751099] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d47eeb7f-79dc-4d96-85de-41a2be05aa12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.756799] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1015.756799] env[69475]: value = "task-3508642" [ 1015.756799] env[69475]: _type = "Task" [ 1015.756799] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.765868] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.109050] env[69475]: INFO nova.compute.manager [-] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Took 1.38 seconds to deallocate network for instance. [ 1016.199576] env[69475]: DEBUG nova.scheduler.client.report [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1016.227681] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508641, 'name': ReconfigVM_Task, 'duration_secs': 0.536963} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.227958] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfigured VM instance instance-00000056 to attach disk [datastore1] volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288/volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1016.232970] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b22605e-69a6-4512-9660-230927d9df9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.249444] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.251039] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1016.251039] env[69475]: value = "task-3508643" [ 1016.251039] env[69475]: _type = "Task" [ 1016.251039] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.259093] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508643, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.266308] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451647} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.266544] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 8963b50c-29ca-49fd-8289-1e1b7583ca25/8963b50c-29ca-49fd-8289-1e1b7583ca25.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1016.266755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1016.266986] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-36bfa638-e11d-4a8b-8837-a1e93c6c8343 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.272693] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1016.272693] env[69475]: value = "task-3508644" [ 1016.272693] env[69475]: _type = "Task" [ 1016.272693] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.279967] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508644, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.428644] env[69475]: DEBUG nova.compute.manager [req-d14c2c60-3ec9-4605-aeee-f93d4fcfbea7 req-3bce7128-b69d-4b01-8e98-9d437d9b2104 service nova] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Received event network-vif-deleted-886a1220-72c0-4395-8f70-1ab633f634ff {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1016.616579] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1016.704690] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.385s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1016.705245] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1016.707871] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 21.224s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1016.708119] env[69475]: DEBUG nova.objects.instance [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1016.761798] env[69475]: DEBUG oslo_vmware.api [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508643, 'name': ReconfigVM_Task, 'duration_secs': 0.147726} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.762108] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701080', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'name': 'volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '41ddf915-343b-46e4-834e-11ab3899242f', 'attached_at': '', 'detached_at': '', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'serial': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1016.782452] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508644, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070618} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.782656] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1016.783448] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c64c28ae-c5d8-4e65-a453-f067c429d68a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.805424] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 8963b50c-29ca-49fd-8289-1e1b7583ca25/8963b50c-29ca-49fd-8289-1e1b7583ca25.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1016.805695] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8393c683-42b2-4908-85a9-78a1f49006db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.824758] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1016.824758] env[69475]: value = "task-3508645" [ 1016.824758] env[69475]: _type = "Task" [ 1016.824758] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.832302] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508645, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.215627] env[69475]: DEBUG nova.compute.utils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1017.216821] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1017.334873] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508645, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.718641] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1017.722036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a94efeb3-9b15-47a0-85d7-27b185a8feaf tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.723112] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 18.295s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.799818] env[69475]: DEBUG nova.objects.instance [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid 41ddf915-343b-46e4-834e-11ab3899242f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1017.851967] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508645, 'name': ReconfigVM_Task, 'duration_secs': 0.813215} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.851967] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 8963b50c-29ca-49fd-8289-1e1b7583ca25/8963b50c-29ca-49fd-8289-1e1b7583ca25.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1017.851967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f40f99a0-87bf-4656-af0a-fed2e257d229 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.851967] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1017.851967] env[69475]: value = "task-3508646" [ 1017.851967] env[69475]: _type = "Task" [ 1017.851967] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.853825] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508646, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.230586] env[69475]: INFO nova.compute.claims [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1018.304999] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ff3b7386-d561-4e3b-822d-3d93deaa9ffc tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.291s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1018.354089] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508646, 'name': Rename_Task, 'duration_secs': 0.135063} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.354692] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.354950] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4cf2286e-dfe5-444b-9113-dac0e9c68439 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.361585] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1018.361585] env[69475]: value = "task-3508647" [ 1018.361585] env[69475]: _type = "Task" [ 1018.361585] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.369009] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508647, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.736476] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1018.739984] env[69475]: INFO nova.compute.resource_tracker [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating resource usage from migration d7afea0c-7a3e-479f-89f1-6da0ed8ba26e [ 1018.762221] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1018.762477] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1018.762643] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1018.762877] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1018.763054] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1018.763208] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1018.763446] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1018.763622] env[69475]: DEBUG nova.virt.hardware 
[None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1018.763798] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1018.763960] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1018.764147] env[69475]: DEBUG nova.virt.hardware [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1018.765059] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99a3fb4a-9a5f-400d-a665-0406a00acd31 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.776407] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6fae6f-777b-4c6a-a92c-4860845d8fc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.792919] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1018.798976] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Creating folder: Project (f76a63b30a644bef99d51e98553601e7). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1018.801609] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42659320-ece7-447f-80b3-87e729f83259 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.812960] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Created folder: Project (f76a63b30a644bef99d51e98553601e7) in parent group-v700823. [ 1018.813233] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Creating folder: Instances. Parent ref: group-v701082. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1018.813639] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-537cfd29-1fb7-411f-92f5-b43d253b58a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.826454] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Created folder: Instances in parent group-v701082. [ 1018.826684] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1018.826876] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1018.827087] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b0a363e-15a5-4523-be07-44c8ff50a169 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.846495] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1018.846495] env[69475]: value = "task-3508650" [ 1018.846495] env[69475]: _type = "Task" [ 1018.846495] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.855306] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508650, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.872161] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508647, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.046042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b089a2a-6419-4f87-a271-f576634f588d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.053210] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8b173d-c7a6-4d7e-88c4-3d354ee69d7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.085089] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a3d29c-92bb-4d6c-8489-cd5b8c43925b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.092239] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bd9c279-4b4b-439a-a345-12037c4dc3a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.106617] env[69475]: DEBUG nova.compute.provider_tree [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1019.357132] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508650, 'name': CreateVM_Task, 'duration_secs': 0.453412} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.357315] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1019.357753] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.357916] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.358268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1019.358525] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f572ad00-b51c-4718-8ef4-6bb643b4e058 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1019.363102] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1019.363102] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523cb840-35ea-37ba-f9b9-9ec22d0619db" [ 1019.363102] env[69475]: _type = "Task" [ 1019.363102] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.373851] env[69475]: DEBUG oslo_vmware.api [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508647, 'name': PowerOnVM_Task, 'duration_secs': 0.876443} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.377271] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.377471] env[69475]: INFO nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Took 7.94 seconds to spawn the instance on the hypervisor. [ 1019.377619] env[69475]: DEBUG nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1019.377892] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523cb840-35ea-37ba-f9b9-9ec22d0619db, 'name': SearchDatastore_Task, 'duration_secs': 0.009393} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.378560] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db4797c8-f802-4a46-be9f-899b259f75ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.381106] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1019.381335] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1019.381562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1019.381711] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1019.382706] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1019.382706] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e82feb2-b53a-4e48-9418-2ded35f4c871 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.390934] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1019.391137] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1019.391794] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f276fe9-5490-4a75-b50d-182c3603e855 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.397230] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1019.397230] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288f5b1-3190-1e53-d5a1-ef641583ee59" [ 1019.397230] env[69475]: _type = "Task" [ 1019.397230] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.404558] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288f5b1-3190-1e53-d5a1-ef641583ee59, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.583150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "20b37e69-5870-4f63-aeba-9293615da478" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.583448] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.609702] env[69475]: DEBUG nova.scheduler.client.report [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.904408] env[69475]: INFO nova.compute.manager [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Took 33.41 seconds to build instance. 
[ 1019.909468] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5288f5b1-3190-1e53-d5a1-ef641583ee59, 'name': SearchDatastore_Task, 'duration_secs': 0.009154} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.910383] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff69e1c3-94bb-4405-bbbf-5d8ff805676d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.915743] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1019.915743] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52820bc3-e050-4d6c-ef47-09cbdc6999fb" [ 1019.915743] env[69475]: _type = "Task" [ 1019.915743] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.925388] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52820bc3-e050-4d6c-ef47-09cbdc6999fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.057181] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.086030] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1020.114457] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.391s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.114725] env[69475]: INFO nova.compute.manager [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Migrating [ 1020.121454] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.661s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.121649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.123714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.899s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.123899] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.125562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.048s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.126935] env[69475]: INFO nova.compute.claims [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1020.149105] env[69475]: INFO nova.scheduler.client.report [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted allocations for instance 211f895a-bba5-4f10-9296-0d461af49f98 [ 
1020.150754] env[69475]: INFO nova.scheduler.client.report [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocations for instance dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac [ 1020.411141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-748a89f3-de4c-4ee8-adf4-7bef3cb56a21 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.924s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.411477] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.355s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1020.411837] env[69475]: DEBUG nova.compute.manager [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1020.412767] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a812ef-bdc1-4545-806f-e08b31e6ff10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.420975] env[69475]: DEBUG nova.compute.manager [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1020.421600] env[69475]: DEBUG nova.objects.instance [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'flavor' on Instance uuid 8963b50c-29ca-49fd-8289-1e1b7583ca25 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.428166] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52820bc3-e050-4d6c-ef47-09cbdc6999fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010109} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.428411] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1020.428655] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1020.428891] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-db090132-cdff-4541-8a82-e4d09fab628b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.435216] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1020.435216] env[69475]: value = "task-3508651" [ 1020.435216] env[69475]: _type = "Task" [ 1020.435216] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.443246] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508651, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.608781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1020.638780] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1020.639009] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1020.639920] env[69475]: DEBUG nova.network.neutron [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1020.663060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4244d489-4175-469a-b145-fd3fb2cf3ab4 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "211f895a-bba5-4f10-9296-0d461af49f98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.860s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.664381] env[69475]: DEBUG oslo_concurrency.lockutils [None req-635f71f9-8a53-4393-8a00-ae92845f7dbe tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.259s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.946156] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508651, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450735} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.946455] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1020.946631] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1020.946881] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-94f7c0f8-06f4-4fb1-80d7-5f5ab44807ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.952781] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1020.952781] env[69475]: value = "task-3508652" [ 1020.952781] env[69475]: _type = "Task" [ 1020.952781] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.960464] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508652, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.399352] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fbb9d5-a68e-4196-8d87-76a88e9e7e44 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.409748] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c57cc546-78c4-498d-bc71-b120e671b5b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.416054] env[69475]: DEBUG nova.network.neutron [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1021.444270] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.445519] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6797a977-6a7c-4e2d-8e6d-da62ad20b2f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.447719] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca9abcb-65e0-4e2f-a011-cc64afdb2eba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.459491] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f2c2c7-93a4-46eb-b66b-e8ffb8f0e8bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.464295] env[69475]: DEBUG oslo_vmware.api [None 
req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1021.464295] env[69475]: value = "task-3508653" [ 1021.464295] env[69475]: _type = "Task" [ 1021.464295] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.479859] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508652, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069652} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.480705] env[69475]: DEBUG nova.compute.provider_tree [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.483038] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1021.483595] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8f40dc-a05f-454c-b1d0-36ebf36a7d7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.492095] env[69475]: DEBUG oslo_vmware.api [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508653, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.511219] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1021.511840] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1d4575ce-0fc4-4e32-879d-9093b181f779 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.532461] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1021.532461] env[69475]: value = "task-3508654" [ 1021.532461] env[69475]: _type = "Task" [ 1021.532461] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.540294] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508654, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.919552] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1021.976108] env[69475]: DEBUG oslo_vmware.api [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508653, 'name': PowerOffVM_Task, 'duration_secs': 0.250089} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.976408] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1021.976584] env[69475]: DEBUG nova.compute.manager [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1021.977371] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70962531-746e-4b37-8c9a-b893a9d51fe6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.985350] env[69475]: DEBUG nova.scheduler.client.report [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1022.043350] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508654, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.491598] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.492113] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.494787] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8bfb88a4-984f-4c68-9465-a8d3f2d1d21f tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.083s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.496201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.725s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.497107] env[69475]: INFO nova.compute.claims [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1022.543943] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508654, 'name': ReconfigVM_Task, 'duration_secs': 0.782533} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.544300] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1022.545310] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4484d120-623f-4c90-b48b-e260bdf95e11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.552780] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1022.552780] env[69475]: value = "task-3508655" [ 1022.552780] env[69475]: _type = "Task" [ 1022.552780] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.560780] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508655, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.001264] env[69475]: DEBUG nova.compute.utils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1023.005111] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1023.005332] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.038707] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.039117] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.052615] env[69475]: DEBUG nova.policy [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a007f7a31e5a4c0eb07bd8bf5d26cf2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a68f54aa603f46468f50c83cd4fa3e8c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1023.064405] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508655, 'name': Rename_Task, 'duration_secs': 0.199254} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.064701] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1023.064996] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bfd45f7a-4d65-43e5-94e7-0a98062ad481 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.071561] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1023.071561] env[69475]: value = "task-3508656" [ 1023.071561] env[69475]: _type = "Task" [ 1023.071561] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.079184] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508656, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.341597] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Successfully created port: 72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.439552] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c031b0-76ad-4199-84cb-6e68d1b001fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.457219] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1023.508533] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.542851] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1023.583112] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508656, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.670303] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.670562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.670766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.671285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1023.671285] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.675107] env[69475]: INFO nova.compute.manager [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Terminating instance [ 1023.792406] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac59a61f-7bd3-4981-9ac4-f1f744672acd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.801913] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fc32419-d2fc-4025-bd36-4b4454886fd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.849568] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d3914d-a562-400b-8edc-55d7b8728776 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.857253] env[69475]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8659cf-012e-4a0a-814c-471694ac34bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.870509] env[69475]: DEBUG nova.compute.provider_tree [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.964344] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1023.964344] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a14b6818-5e38-4f0f-b54f-e9282ff36c06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.970470] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1023.970470] env[69475]: value = "task-3508657" [ 1023.970470] env[69475]: _type = "Task" [ 1023.970470] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.980523] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1023.980799] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1024.062970] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.083096] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508656, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.178969] env[69475]: DEBUG nova.compute.manager [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1024.179270] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1024.180151] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580edd6d-436c-4daa-93c0-abc944fa23e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.187904] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1024.188147] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70f94278-61fe-4e13-ad87-8909f06d481e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.279908] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1024.280231] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1024.280486] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore1] 8963b50c-29ca-49fd-8289-1e1b7583ca25 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1024.280757] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebdae284-3cee-445c-9e29-189dd6340985 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.286686] env[69475]: DEBUG oslo_vmware.api [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1024.286686] env[69475]: value = "task-3508659" [ 1024.286686] env[69475]: _type = "Task" [ 1024.286686] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.294820] env[69475]: DEBUG oslo_vmware.api [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508659, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.373599] env[69475]: DEBUG nova.scheduler.client.report [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1024.490636] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.490636] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1024.490822] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1024.490850] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1024.491018] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1024.491201] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1024.491450] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1024.491719] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1024.492017] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1024.492303] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1024.492600] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1024.497953] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7934f33-d852-4bb0-824b-882f42bdbac5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.513731] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1024.513731] env[69475]: value = "task-3508660" [ 1024.513731] env[69475]: _type = "Task" [ 1024.513731] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.522401] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.524282] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508660, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.548619] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1024.548865] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1024.549030] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1024.549215] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1024.549363] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1024.549509] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1024.549718] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1024.549874] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1024.550051] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1024.550217] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1024.550388] env[69475]: DEBUG nova.virt.hardware [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1024.551227] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-103cf47b-351d-4e7b-ad90-296f15d733a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.558430] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1207d25e-b7f1-45ba-86cc-24d5f6e19c01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.582702] env[69475]: DEBUG oslo_vmware.api [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508656, 'name': PowerOnVM_Task, 'duration_secs': 1.036922} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.583015] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1024.583257] env[69475]: INFO nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Took 5.85 seconds to spawn the instance on the hypervisor. 
[ 1024.583488] env[69475]: DEBUG nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1024.584273] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9187515e-68ba-4320-ae90-a7bb6306ee1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.737032] env[69475]: DEBUG nova.compute.manager [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Received event network-vif-plugged-72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.737032] env[69475]: DEBUG oslo_concurrency.lockutils [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] Acquiring lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.737032] env[69475]: DEBUG oslo_concurrency.lockutils [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] Lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.737032] env[69475]: DEBUG oslo_concurrency.lockutils [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] Lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.737032] env[69475]: DEBUG nova.compute.manager [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] No waiting events found dispatching network-vif-plugged-72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1024.737032] env[69475]: WARNING nova.compute.manager [req-87cb62b9-8895-4a98-8da0-5ca11012260b req-cb76c235-2820-4fe6-a5df-0987b4ac8f11 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Received unexpected event network-vif-plugged-72e7aa25-953c-4253-8e6e-6543fd67af89 for instance with vm_state building and task_state spawning. [ 1024.799233] env[69475]: DEBUG oslo_vmware.api [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508659, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226111} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.799497] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.799654] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.799832] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.800012] env[69475]: INFO nova.compute.manager [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1024.800260] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1024.800450] env[69475]: DEBUG nova.compute.manager [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1024.800525] env[69475]: DEBUG nova.network.neutron [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1024.825869] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Successfully updated port: 72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1024.878604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.879158] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1024.884027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.886s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.884358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.886731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.638s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.888271] env[69475]: INFO nova.compute.claims [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.912928] env[69475]: INFO nova.scheduler.client.report [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance e8657a44-d786-4fa6-b39c-28fc71415ce8 [ 1025.023263] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508660, 'name': ReconfigVM_Task, 'duration_secs': 0.179661} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.023609] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1025.103066] env[69475]: INFO nova.compute.manager [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Took 36.15 seconds to build instance. 
[ 1025.329333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.329499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.329656] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.383482] env[69475]: DEBUG nova.compute.manager [req-02f20313-13e6-425f-b036-ee53908eb65c req-40bb946c-809f-4948-9027-ed2bc70465cf service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Received event network-vif-deleted-f3242024-4636-460a-a57d-a89bdca2e37c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1025.383565] env[69475]: INFO nova.compute.manager [req-02f20313-13e6-425f-b036-ee53908eb65c req-40bb946c-809f-4948-9027-ed2bc70465cf service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Neutron deleted interface f3242024-4636-460a-a57d-a89bdca2e37c; detaching it from the instance and deleting it from the info cache [ 1025.383806] env[69475]: DEBUG nova.network.neutron [req-02f20313-13e6-425f-b036-ee53908eb65c req-40bb946c-809f-4948-9027-ed2bc70465cf service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.386870] env[69475]: DEBUG nova.compute.utils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1025.388165] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1025.388326] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1025.421576] env[69475]: DEBUG oslo_concurrency.lockutils [None req-dde8d479-ff66-443b-9791-89ffbd5ad987 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "e8657a44-d786-4fa6-b39c-28fc71415ce8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.852s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.448770] env[69475]: DEBUG nova.policy [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a007f7a31e5a4c0eb07bd8bf5d26cf2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a68f54aa603f46468f50c83cd4fa3e8c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1025.530978] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None 
req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1025.532641] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1025.533290] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1025.533290] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1025.533290] env[69475]: DEBUG nova.virt.hardware [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1025.538795] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfiguring VM instance instance-00000044 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1025.539143] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74895ba2-129b-4ff8-aed6-5415eb036d0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.560258] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1025.560258] env[69475]: value = "task-3508661" [ 1025.560258] env[69475]: _type = "Task" [ 1025.560258] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.569916] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508661, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.605710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb213d13-89bd-4124-bb0b-339613d7a16b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.666s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1025.869080] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1025.871465] env[69475]: DEBUG nova.network.neutron [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.888923] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-15f5e7cf-2ae9-479c-ac04-1d948568605f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.891215] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1025.900848] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Successfully created port: 24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1025.905756] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f79fce6a-b3b0-41c8-960f-1a00785154d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.947064] env[69475]: DEBUG nova.compute.manager [req-02f20313-13e6-425f-b036-ee53908eb65c req-40bb946c-809f-4948-9027-ed2bc70465cf service nova] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Detach interface failed, port_id=f3242024-4636-460a-a57d-a89bdca2e37c, reason: Instance 8963b50c-29ca-49fd-8289-1e1b7583ca25 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1026.046438] env[69475]: DEBUG nova.network.neutron [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updating instance_info_cache with network_info: [{"id": "72e7aa25-953c-4253-8e6e-6543fd67af89", "address": "fa:16:3e:a2:e8:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e7aa25-95", "ovs_interfaceid": "72e7aa25-953c-4253-8e6e-6543fd67af89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.075543] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508661, 'name': ReconfigVM_Task, 'duration_secs': 0.161998} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.075741] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfigured VM instance instance-00000044 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1026.076606] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a03f418e-ba78-45b6-912e-feb8c9be5b0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.101040] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.103763] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7126047-b0b6-442c-9ce2-baac5679b465 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.122135] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1026.122135] env[69475]: value = "task-3508662" [ 1026.122135] env[69475]: _type = "Task" [ 1026.122135] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.129958] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508662, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.243826] env[69475]: INFO nova.compute.manager [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Rebuilding instance [ 1026.264449] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf01eec-5bee-4db8-ae90-f371a29ea6d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.281768] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a00036-f1f9-4652-aefa-b57444b91a05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.318937] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c1f960-5323-4d17-ab73-51777320f0ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.322232] env[69475]: DEBUG nova.compute.manager [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1026.323035] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8903fa2-e84c-4b65-866f-ff6342852a9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.332073] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e35bec6-7f90-4d80-8bf9-af48a9c0bddd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.348332] env[69475]: DEBUG nova.compute.provider_tree [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.374210] env[69475]: INFO nova.compute.manager [-] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Took 1.57 seconds to deallocate network for instance. 
[ 1026.549416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.549762] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Instance network_info: |[{"id": "72e7aa25-953c-4253-8e6e-6543fd67af89", "address": "fa:16:3e:a2:e8:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e7aa25-95", "ovs_interfaceid": "72e7aa25-953c-4253-8e6e-6543fd67af89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.550219] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a2:e8:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '84aee122-f630-43c5-9cc1-3a38d3819c82', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72e7aa25-953c-4253-8e6e-6543fd67af89', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.557779] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating folder: Project (a68f54aa603f46468f50c83cd4fa3e8c). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.558463] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2759ac8-f31f-4ded-a88d-4c3c86cf0938 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.570439] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created folder: Project (a68f54aa603f46468f50c83cd4fa3e8c) in parent group-v700823. [ 1026.570627] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating folder: Instances. Parent ref: group-v701085. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.570862] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0627375d-94f3-4a04-9f6a-45b575c20f2b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.580416] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created folder: Instances in parent group-v701085. [ 1026.580706] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.580955] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.581235] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-380c960e-257b-4420-bb4e-0012f4181298 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.601453] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.601453] env[69475]: value = "task-3508665" [ 1026.601453] env[69475]: _type = "Task" [ 1026.601453] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.609017] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508665, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.632668] env[69475]: DEBUG oslo_vmware.api [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508662, 'name': ReconfigVM_Task, 'duration_secs': 0.273294} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.632970] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Reconfigured VM instance instance-00000044 to attach disk [datastore1] 82236043-3222-4134-8717-4c239ed12aba/82236043-3222-4134-8717-4c239ed12aba.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1026.633310] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1026.803754] env[69475]: DEBUG nova.compute.manager [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Received event network-changed-72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.803957] env[69475]: DEBUG nova.compute.manager [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Refreshing instance network info cache due to event network-changed-72e7aa25-953c-4253-8e6e-6543fd67af89. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1026.804263] env[69475]: DEBUG oslo_concurrency.lockutils [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] Acquiring lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.804457] env[69475]: DEBUG oslo_concurrency.lockutils [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] Acquired lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.804671] env[69475]: DEBUG nova.network.neutron [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Refreshing network info cache for port 72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.850983] env[69475]: DEBUG nova.scheduler.client.report [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.858715] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.859507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.881875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.903080] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1026.931928] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1026.932307] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1026.932426] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1026.932622] env[69475]: DEBUG nova.virt.hardware 
[None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1026.932769] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1026.932899] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1026.933291] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1026.933291] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1026.933435] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1026.933800] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1026.934019] env[69475]: DEBUG nova.virt.hardware [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1026.934949] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20124200-fca2-44af-8545-29e5600e07a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.943747] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c64d7df-c2b7-47e9-98dd-c99bb0a82580 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.111720] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508665, 'name': CreateVM_Task, 'duration_secs': 0.328741} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.111975] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.112571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.112741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.113061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1027.113311] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37eca091-48c2-458c-94be-f5ba1fe90d9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.118173] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1027.118173] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5deae-cada-0c9b-662b-e5da971dbfa7" [ 1027.118173] env[69475]: _type = "Task" [ 1027.118173] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.126202] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5deae-cada-0c9b-662b-e5da971dbfa7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.140069] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ce81a5-2709-4d90-b603-6e4da97ae310 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.158572] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa74a76-ef8e-4ee1-b89a-223e2dc51639 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.178226] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1027.342309] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1027.343394] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b14113f-8a9b-4e16-9f3e-223af1aece22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.350585] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1027.350585] env[69475]: value = "task-3508666" [ 1027.350585] env[69475]: _type = "Task" [ 1027.350585] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.359751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.473s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.360456] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1027.363761] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508666, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.365027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.968s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.365443] env[69475]: INFO nova.compute.claims [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1027.368114] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1027.505947] env[69475]: DEBUG nova.network.neutron [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updated VIF entry in instance network info cache for port 72e7aa25-953c-4253-8e6e-6543fd67af89. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1027.506333] env[69475]: DEBUG nova.network.neutron [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updating instance_info_cache with network_info: [{"id": "72e7aa25-953c-4253-8e6e-6543fd67af89", "address": "fa:16:3e:a2:e8:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e7aa25-95", "ovs_interfaceid": "72e7aa25-953c-4253-8e6e-6543fd67af89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.628889] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5deae-cada-0c9b-662b-e5da971dbfa7, 'name': SearchDatastore_Task, 'duration_secs': 0.00969} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.629779] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.630099] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.630525] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.630623] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.630867] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.631196] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fcfd688c-ac10-40ef-9f29-8dcda0e5e18d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.639771] env[69475]: DEBUG nova.compute.manager [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Received event network-vif-plugged-24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1027.639996] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] Acquiring lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.640204] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1027.640367] env[69475]: DEBUG oslo_concurrency.lockutils [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1027.640530] env[69475]: DEBUG nova.compute.manager [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] No waiting events found dispatching network-vif-plugged-24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1027.640728] env[69475]: WARNING nova.compute.manager [req-a3541766-b569-4ba0-b161-36b71c771023 req-96fc0703-43f3-4c40-82fb-039d9b3978df service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Received unexpected event network-vif-plugged-24efd80f-72cd-4c40-962a-103b1ca55a1f for instance with vm_state building and task_state spawning. [ 1027.644067] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.644312] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.645421] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8314f20c-e957-48ba-af8f-4801ee110ea4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.651443] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1027.651443] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520d2b21-b1ad-42cc-945f-67d0804b3568" [ 1027.651443] env[69475]: _type = "Task" [ 1027.651443] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.661796] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520d2b21-b1ad-42cc-945f-67d0804b3568, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.719666] env[69475]: DEBUG nova.network.neutron [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Port 91ad3911-8ea3-4bb6-bcf5-fd800e27e57f binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1027.751567] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Successfully updated port: 24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1027.860308] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508666, 'name': PowerOffVM_Task, 'duration_secs': 0.119934} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.860500] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.861166] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1027.861924] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c4a849-4bae-4efc-8cb0-50a6fadc9a9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.868359] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1027.868580] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-679398d4-aa86-4940-9662-92f9a184e18d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.874123] env[69475]: DEBUG nova.compute.utils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1027.880050] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1027.880050] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1027.898150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1027.936782] env[69475]: DEBUG nova.policy [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1027.942080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1027.942464] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1027.942754] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Deleting the datastore file [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1027.943154] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d7f2c0ca-93d0-40c0-a8ed-14344a539e7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.950760] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1027.950760] env[69475]: value = "task-3508668" [ 1027.950760] env[69475]: _type = "Task" [ 1027.950760] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.964754] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508668, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.009341] env[69475]: DEBUG oslo_concurrency.lockutils [req-36aded15-35a8-46ef-a998-6d610c534181 req-4c02c6b2-5b70-4968-9738-f5d471bfce10 service nova] Releasing lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.162257] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520d2b21-b1ad-42cc-945f-67d0804b3568, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.163084] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25b9637b-be61-4444-a971-5c73fd626f37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.168782] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1028.168782] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bba562-32f3-4ecc-6927-11ffd72079ba" [ 1028.168782] env[69475]: _type = "Task" [ 1028.168782] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.176806] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bba562-32f3-4ecc-6927-11ffd72079ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.254714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.254973] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1028.255468] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1028.378684] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1028.390838] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Successfully created port: 9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.464950] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508668, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.096041} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.465335] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1028.465617] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1028.465910] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1028.679169] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bba562-32f3-4ecc-6927-11ffd72079ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010344} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.680187] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.680441] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/4100fb43-1dae-40b1-8caa-11dd67962274.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.681177] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fca6d8b-d501-4063-aff8-008c30ab16c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.683642] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b19f120b-6215-4614-8323-58724c76ad6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.689782] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf655ea7-b055-4dea-b82e-921f16f222a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.695913] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 
tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1028.695913] env[69475]: value = "task-3508669" [ 1028.695913] env[69475]: _type = "Task" [ 1028.695913] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.725536] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-277267aa-ba0c-48f1-b13f-dbbd332a95c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.738915] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508669, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.745920] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1028.746108] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.746302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.750953] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0cb7375-c10a-4ccf-8242-a49f70f42154 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.768724] env[69475]: DEBUG nova.compute.provider_tree [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1028.799418] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1028.969789] env[69475]: DEBUG nova.network.neutron [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Updating instance_info_cache with network_info: [{"id": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "address": "fa:16:3e:52:a2:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24efd80f-72", "ovs_interfaceid": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.206660] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.460432} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.206939] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/4100fb43-1dae-40b1-8caa-11dd67962274.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1029.207065] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1029.207313] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17cad9b5-509b-4bac-a169-c0e10c82478c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.214215] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1029.214215] env[69475]: value = "task-3508670" [ 1029.214215] env[69475]: _type = "Task" [ 1029.214215] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.221389] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508670, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.276186] env[69475]: DEBUG nova.scheduler.client.report [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1029.394013] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1029.421337] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.421486] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1029.421635] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1029.421820] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1029.421968] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1029.422126] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1029.422338] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1029.422498] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1029.422726] 
env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1029.422925] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1029.423274] env[69475]: DEBUG nova.virt.hardware [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1029.424019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5aa3dd3-1885-463d-93ac-1d0a0bdace1a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.432430] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0172e787-2907-453e-a78e-1d5e0c0da6ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.475589] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1029.475928] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Instance network_info: |[{"id": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "address": "fa:16:3e:52:a2:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24efd80f-72", "ovs_interfaceid": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1029.478980] env[69475]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:a2:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '84aee122-f630-43c5-9cc1-3a38d3819c82', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24efd80f-72cd-4c40-962a-103b1ca55a1f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.488291] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.488500] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.488726] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f61d67d6-3ee9-4b3d-b024-6b3001b06fdf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.510447] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1029.510725] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1029.510922] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1029.511153] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1029.511337] env[69475]: 
DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1029.511654] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1029.511905] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1029.512111] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1029.512322] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1029.512520] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1029.512726] env[69475]: DEBUG nova.virt.hardware [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1029.513614] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56132be-0a32-4e8a-a182-d863dd24c62b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.516943] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.516943] env[69475]: value = "task-3508671" [ 1029.516943] env[69475]: _type = "Task" [ 1029.516943] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.523363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa6b56be-dd6d-4c57-a85b-85274b5f7a1a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.530138] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508671, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.541447] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.547286] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1029.547554] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.547769] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f55be72-ca46-44ab-97d8-62e8c77ffcdc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.564977] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.564977] env[69475]: value = "task-3508672" [ 1029.564977] env[69475]: _type = "Task" [ 1029.564977] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.572120] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508672, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.675869] env[69475]: DEBUG nova.compute.manager [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Received event network-changed-24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1029.676095] env[69475]: DEBUG nova.compute.manager [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Refreshing instance network info cache due to event network-changed-24efd80f-72cd-4c40-962a-103b1ca55a1f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1029.676341] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] Acquiring lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.676496] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] Acquired lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.676663] env[69475]: DEBUG nova.network.neutron [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Refreshing network info cache for port 24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1029.724096] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062203} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.724397] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1029.725251] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef4b44ac-bdfe-4c0a-8265-19a56c83bebe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.747169] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/4100fb43-1dae-40b1-8caa-11dd67962274.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1029.747836] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e2314ab6-2afe-4ec6-9faf-5f44b3f1a769 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.770517] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1029.770517] env[69475]: value = "task-3508673" [ 1029.770517] env[69475]: _type = "Task" [ 1029.770517] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.778801] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508673, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.781635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.418s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.782131] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1029.784618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.329s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.784804] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.786820] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.587s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.786964] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.788804] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.217s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.788904] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 
tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.790574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.541s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1029.790804] env[69475]: DEBUG nova.objects.instance [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1029.796397] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1029.796604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1029.796814] env[69475]: DEBUG nova.network.neutron [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1029.822752] env[69475]: INFO nova.scheduler.client.report [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Deleted allocations for instance f40aa0bb-af1d-4f8f-a906-f1c83307b465 [ 1029.828168] env[69475]: INFO nova.scheduler.client.report [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Deleted allocations for instance 4b3b53d1-82bf-40e7-9988-af7b51e9883a [ 1029.846684] env[69475]: INFO nova.scheduler.client.report [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocations for instance b8c50d0a-4b3d-4b70-9bd6-8304fa128e59 [ 1030.028312] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508671, 'name': CreateVM_Task, 'duration_secs': 0.384617} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.028651] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1030.030604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.030604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.030604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1030.030725] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c04b35d1-9cf1-4058-838c-785d64138cb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.035958] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1030.035958] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52711cd5-801f-3592-861a-e18114e9ef75" [ 1030.035958] env[69475]: _type = "Task" [ 1030.035958] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.044395] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52711cd5-801f-3592-861a-e18114e9ef75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.076661] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508672, 'name': CreateVM_Task, 'duration_secs': 0.301847} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.076790] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1030.077124] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.243015] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Successfully updated port: 9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.286777] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508673, 'name': ReconfigVM_Task, 'duration_secs': 0.354736} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.287066] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/4100fb43-1dae-40b1-8caa-11dd67962274.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1030.287680] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e122f290-d724-43c2-82a4-fc9289b97895 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.293717] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1030.293717] env[69475]: value = "task-3508674" [ 1030.293717] env[69475]: _type = "Task" [ 1030.293717] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.294933] env[69475]: DEBUG nova.compute.utils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1030.303240] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1030.303410] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1030.317716] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508674, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.338566] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a2747584-b5e3-4749-b920-40546482c1c4 tempest-MigrationsAdminTest-286006226 tempest-MigrationsAdminTest-286006226-project-member] Lock "4b3b53d1-82bf-40e7-9988-af7b51e9883a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.071s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.339930] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ad07eb76-caae-4e0c-8cd0-e67c8d3df148 tempest-ListServerFiltersTestJSON-2013984309 tempest-ListServerFiltersTestJSON-2013984309-project-member] Lock "f40aa0bb-af1d-4f8f-a906-f1c83307b465" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.342s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.358597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-26c821ea-d4fa-438e-9dcb-cb84d0b5becc tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "b8c50d0a-4b3d-4b70-9bd6-8304fa128e59" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.447s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.407834] env[69475]: DEBUG nova.policy [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '82f6c3724a2b4430b8df87655ff91c63', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1073981d0d7740e78805798e02ff9d55', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1030.419446] env[69475]: DEBUG nova.network.neutron [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Updated VIF entry in instance network info cache for port 24efd80f-72cd-4c40-962a-103b1ca55a1f. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1030.419796] env[69475]: DEBUG nova.network.neutron [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Updating instance_info_cache with network_info: [{"id": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "address": "fa:16:3e:52:a2:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24efd80f-72", "ovs_interfaceid": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.547712] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52711cd5-801f-3592-861a-e18114e9ef75, 'name': SearchDatastore_Task, 'duration_secs': 0.009572} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.548146] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.548406] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.548640] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.548787] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.548963] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.549267] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.549682] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1030.549787] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-95383084-a1ff-409d-abfc-0978d7e216ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.551632] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65d0fab1-3187-49bc-b17a-b4cae6fe736a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1030.556444] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1030.556444] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f21d48-63fd-8631-1a66-f7ee0a81510a" [ 1030.556444] env[69475]: _type = "Task" [ 1030.556444] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.560672] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.560849] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.561914] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a801e49-e64d-4b45-9f31-6c4509a4a13a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.580854] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f21d48-63fd-8631-1a66-f7ee0a81510a, 'name': SearchDatastore_Task, 'duration_secs': 0.008006} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.581672] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1030.583042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.583042] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.584155] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1030.584155] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243179b-5728-f77f-c292-3ed4f6c4730c" [ 1030.584155] env[69475]: _type = "Task" [ 1030.584155] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.596335] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243179b-5728-f77f-c292-3ed4f6c4730c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.745502] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.745655] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.746063] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.798214] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1030.804168] env[69475]: DEBUG oslo_concurrency.lockutils [None req-00416176-ce02-4dab-b960-a7f986a73111 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.808960] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.193s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.809250] env[69475]: DEBUG nova.objects.instance [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lazy-loading 'resources' on Instance uuid f222cc16-7581-41ff-ae7c-0538c7b3c721 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.811031] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508674, 'name': Rename_Task, 'duration_secs': 0.135641} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.813814] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1030.814094] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-27bb8cfe-9587-452a-a7f2-d1bc4d2a0a93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.822086] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1030.822086] env[69475]: value = "task-3508675" [ 1030.822086] env[69475]: _type = "Task" [ 1030.822086] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.834279] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508675, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.879704] env[69475]: DEBUG nova.network.neutron [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1030.923215] env[69475]: DEBUG oslo_concurrency.lockutils [req-ab146759-8e2a-4be1-888e-687db3ebf0c2 req-2894fce7-6567-4482-a588-d5b6aabdba23 service nova] Releasing lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.026620] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Successfully created port: cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1031.097155] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243179b-5728-f77f-c292-3ed4f6c4730c, 'name': SearchDatastore_Task, 'duration_secs': 0.008611} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.098188] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfa85118-3a5b-43f0-ba42-14c6e1d1b822 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.105070] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1031.105070] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52383083-5b67-445a-a4c6-7a326646968d" [ 1031.105070] env[69475]: _type = "Task" [ 1031.105070] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.118139] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52383083-5b67-445a-a4c6-7a326646968d, 'name': SearchDatastore_Task, 'duration_secs': 0.009751} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.118436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.118699] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/f8a82046-4589-45d2-a7a3-466fe4d8f9c6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1031.118984] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.119186] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1031.119400] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2055c09c-b9ca-435c-bad9-56b377442279 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.121534] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b597a88-5140-4e55-a994-4217f05aeb4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.127972] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1031.127972] env[69475]: value = "task-3508676" [ 1031.127972] env[69475]: _type = "Task" [ 1031.127972] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.132124] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1031.132374] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1031.133360] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45ba7dd7-7ea9-4855-8cca-17b95898f574 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.138853] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508676, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.142027] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1031.142027] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523adf75-a1ec-5eb0-fe03-20184a8aab5d" [ 1031.142027] env[69475]: _type = "Task" [ 1031.142027] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.149430] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523adf75-a1ec-5eb0-fe03-20184a8aab5d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.337148] env[69475]: DEBUG oslo_vmware.api [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508675, 'name': PowerOnVM_Task, 'duration_secs': 0.510787} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.337702] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1031.338123] env[69475]: INFO nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Took 6.82 seconds to spawn the instance on the hypervisor. 
[ 1031.338446] env[69475]: DEBUG nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.339381] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82924d67-b50a-432e-a1b4-89995d6bf7d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.383658] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1031.608156] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.643294] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508676, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.663117] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523adf75-a1ec-5eb0-fe03-20184a8aab5d, 'name': SearchDatastore_Task, 'duration_secs': 0.017639} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.663117] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9ee0bd-924a-41d4-a69e-a46000d67310 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.671221] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1031.671221] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e37c8b-680a-0fa1-1e0d-9720c07124ea" [ 1031.671221] env[69475]: _type = "Task" [ 1031.671221] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.686860] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e37c8b-680a-0fa1-1e0d-9720c07124ea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.726575] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4c0e04-43d9-44cb-a9f7-21d59bedd120 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.739642] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab7e9b50-56bf-4f31-9d58-673034dbc067 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.811605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-094f6449-1886-4448-b244-e3f8757e775d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.818041] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1031.827813] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2a7f71-777b-484a-8957-e09ac57a4f61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.868230] env[69475]: DEBUG nova.compute.provider_tree [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.873719] env[69475]: INFO nova.compute.manager [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Took 28.82 seconds to build instance. 
[ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 
tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1031.885971] env[69475]: DEBUG nova.virt.hardware [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1031.886409] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8785442-1c83-49e8-88a5-40675b4bae2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.900386] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3e9204-1c87-4039-873d-a3cecd5e749a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.922780] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1f76e1-c690-48eb-a220-eb2ee4d04db2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.946445] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1e9841-f800-4ca9-a656-cfa3387b1522 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.953670] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1032.071624] env[69475]: DEBUG nova.network.neutron [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.091018] env[69475]: DEBUG nova.compute.manager [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-plugged-9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.091255] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1032.091498] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.091675] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.091843] env[69475]: DEBUG nova.compute.manager [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] No waiting events found dispatching network-vif-plugged-9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1032.092041] env[69475]: WARNING nova.compute.manager [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received unexpected event network-vif-plugged-9d51ee71-8419-4657-9a34-44bec2faf3c2 for instance with vm_state building and task_state spawning. [ 1032.092602] env[69475]: DEBUG nova.compute.manager [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-changed-9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.092943] env[69475]: DEBUG nova.compute.manager [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing instance network info cache due to event network-changed-9d51ee71-8419-4657-9a34-44bec2faf3c2. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1032.093871] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.139916] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508676, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595838} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.140474] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/f8a82046-4589-45d2-a7a3-466fe4d8f9c6.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.140706] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.140960] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-df68e454-5643-40da-9cb7-fdf9c1e294e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.148452] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1032.148452] env[69475]: value = "task-3508677" [ 1032.148452] env[69475]: _type = "Task" [ 1032.148452] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.156620] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508677, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.188946] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e37c8b-680a-0fa1-1e0d-9720c07124ea, 'name': SearchDatastore_Task, 'duration_secs': 0.039949} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.188946] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.188946] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1032.188946] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2aaee6f5-2fd6-41c4-800a-24fbf2bbc96d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.193922] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1032.193922] env[69475]: value = "task-3508678" [ 1032.193922] env[69475]: _type = "Task" [ 1032.193922] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.203044] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508678, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.372375] env[69475]: DEBUG nova.scheduler.client.report [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.377147] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e38e1c6-a3ac-4bea-99b7-a60fffa5ba96 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.334s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.461143] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2bb59c5e-20bc-48d6-983c-8e08d9ff383e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance '82236043-3222-4134-8717-4c239ed12aba' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1032.574089] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.574441] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Instance network_info: |[{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.575142] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.575343] env[69475]: DEBUG nova.network.neutron [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing network info cache for port 9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.576535] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:5a:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d51ee71-8419-4657-9a34-44bec2faf3c2', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.585375] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.588938] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.590017] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a48bb6cb-6cce-45ad-8d14-970148d02e5f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.619491] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.619491] env[69475]: value = "task-3508679" [ 1032.619491] env[69475]: _type = "Task" [ 1032.619491] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.627969] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.659761] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508677, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075607} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.660077] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1032.660884] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83068c1f-262d-4055-bfe7-9c4242d0c0ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.685058] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/f8a82046-4589-45d2-a7a3-466fe4d8f9c6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1032.685766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-edb0d30e-1013-4e70-8d02-6f05a4204595 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.709247] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508678, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.710521] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1032.710521] env[69475]: value = "task-3508680" [ 1032.710521] env[69475]: _type = "Task" [ 1032.710521] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.718333] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508680, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.878866] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.070s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.887782] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.277s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.888145] env[69475]: INFO nova.compute.claims [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.926484] env[69475]: INFO nova.scheduler.client.report [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Deleted allocations for instance f222cc16-7581-41ff-ae7c-0538c7b3c721 [ 1033.032301] env[69475]: DEBUG nova.network.neutron [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updated VIF entry in instance network info cache for port 9d51ee71-8419-4657-9a34-44bec2faf3c2. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.032696] env[69475]: DEBUG nova.network.neutron [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.134676] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.209983] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508678, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.91118} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.210480] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1033.210708] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1033.211638] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1d6889a-ab1d-44bc-8b91-aff082437146 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.223572] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508680, 'name': ReconfigVM_Task, 'duration_secs': 0.464138} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.224933] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Reconfigured VM instance instance-0000005e to attach disk [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/f8a82046-4589-45d2-a7a3-466fe4d8f9c6.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.226166] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1033.226166] env[69475]: value = "task-3508681" [ 1033.226166] env[69475]: _type = "Task" [ 1033.226166] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.226974] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37a37996-174f-405e-acdc-1d853db0d15e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.239875] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508681, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.241947] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1033.241947] env[69475]: value = "task-3508682" [ 1033.241947] env[69475]: _type = "Task" [ 1033.241947] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.251406] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508682, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.438666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-045dbd29-6aaf-456c-a8d1-2473f3e00ee3 tempest-ImagesTestJSON-1045980182 tempest-ImagesTestJSON-1045980182-project-member] Lock "f222cc16-7581-41ff-ae7c-0538c7b3c721" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.340s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.537662] env[69475]: DEBUG oslo_concurrency.lockutils [req-da70295f-379d-47a2-806d-92ad8924b14e req-e3621df5-aa83-4888-8c98-ff80789192ba service nova] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.544535] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Successfully updated port: cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1033.631625] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.740817] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508681, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070233} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.741503] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.742441] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871e4cb8-941a-47f4-9dfb-f61754d9e6c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.754184] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508682, 'name': Rename_Task, 'duration_secs': 0.147503} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.762225] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1033.770984] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.771363] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70fc1315-a5eb-4075-83f9-6fe5a7454c85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.772931] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35930b42-ffef-4a22-82f1-d5827d835b8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.792436] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1033.792436] env[69475]: value = "task-3508683" [ 1033.792436] env[69475]: _type = "Task" [ 1033.792436] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.793876] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1033.793876] env[69475]: value = "task-3508684" [ 1033.793876] env[69475]: _type = "Task" [ 1033.793876] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.809280] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.809731] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508684, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.038111] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.038355] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.051074] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.051074] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.051217] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1034.134420] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.225755] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d86cdc2-1f74-4256-8b2e-de17c122284b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.234201] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8645db56-a0ed-4591-ab39-c41ad4fd6eba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.270475] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64257486-0e2e-4268-8463-39534409713d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.278555] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-908ebaeb-bf52-4f2d-94f5-c7e11b27ef2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.297088] env[69475]: DEBUG nova.compute.provider_tree [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.304452] env[69475]: DEBUG nova.compute.manager [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Received event network-vif-plugged-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.306024] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Acquiring lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.306024] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.306024] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.306024] env[69475]: DEBUG nova.compute.manager [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] No waiting events found dispatching network-vif-plugged-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1034.306024] env[69475]: 
WARNING nova.compute.manager [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Received unexpected event network-vif-plugged-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c for instance with vm_state building and task_state spawning. [ 1034.306024] env[69475]: DEBUG nova.compute.manager [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Received event network-changed-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.306024] env[69475]: DEBUG nova.compute.manager [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Refreshing instance network info cache due to event network-changed-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.306504] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Acquiring lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.308052] env[69475]: DEBUG nova.scheduler.client.report [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.321686] env[69475]: DEBUG oslo_vmware.api [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508683, 'name': PowerOnVM_Task, 'duration_secs': 0.503529} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.325606] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.327110] env[69475]: INFO nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Took 7.42 seconds to spawn the instance on the hypervisor. 
[ 1034.327110] env[69475]: DEBUG nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1034.327110] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508684, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.327509] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935b7a98-6fa1-40a8-bdc3-6c2b8e4da710 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.544686] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.618592] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1034.631716] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.814574] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508684, 'name': ReconfigVM_Task, 'duration_secs': 0.947987} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.816016] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d/44bcaa36-ecd9-448b-b589-7c32066ede1d.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1034.817530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.932s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.818112] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1034.822184] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7470bd96-1a89-4cea-8a8e-5c6cd4e8f70c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.827155] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.762s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.827155] env[69475]: INFO nova.compute.claims [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1034.834190] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1034.834190] env[69475]: value = "task-3508685" [ 1034.834190] env[69475]: _type = "Task" [ 1034.834190] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.835048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.835431] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.835707] env[69475]: DEBUG nova.compute.manager [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Going to confirm migration 5 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1034.848656] env[69475]: INFO nova.compute.manager [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Took 31.10 seconds to build instance. [ 1034.858050] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508685, 'name': Rename_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.860794] env[69475]: DEBUG nova.network.neutron [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Updating instance_info_cache with network_info: [{"id": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "address": "fa:16:3e:60:d6:87", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf56fb83-6f", "ovs_interfaceid": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1034.956077] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "951c225b-d930-449f-81b5-4f28f9dd27e5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.958461] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.066147] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.131748] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508679, 'name': CreateVM_Task, 'duration_secs': 2.055886} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.131916] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1035.132609] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.133326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.133326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1035.133426] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0c5f84a9-9540-4673-8af4-60d225e5403c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.138984] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1035.138984] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c25e6c-7438-210e-11a9-b471f97d57e5" [ 1035.138984] env[69475]: _type = "Task" [ 1035.138984] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.146528] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c25e6c-7438-210e-11a9-b471f97d57e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.327482] env[69475]: DEBUG nova.compute.utils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.328955] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.329625] env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.353806] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a84f3bf1-5b3d-4d9c-b4ed-3792c7a9f65e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.612s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.354312] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508685, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.363260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.363478] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Instance network_info: |[{"id": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "address": "fa:16:3e:60:d6:87", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf56fb83-6f", "ovs_interfaceid": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1035.363786] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Acquired lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1035.363964] env[69475]: DEBUG nova.network.neutron [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Refreshing network info cache for port cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.365149] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:d6:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e23c1d18-c841-49ea-95f3-df5ceac28afd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cf56fb83-6fba-4e69-9e72-3cd7f5dd266c', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1035.374106] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1035.374884] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1035.375118] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b2b079f-ccd8-4a53-b2e5-6793f2ee4a35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.403235] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1035.403235] env[69475]: value = "task-3508686" [ 1035.403235] env[69475]: _type = "Task" [ 1035.403235] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.412663] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508686, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.452139] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.452139] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.452139] env[69475]: DEBUG nova.network.neutron [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.452139] env[69475]: DEBUG nova.objects.instance [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'info_cache' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1035.463473] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1035.489232] env[69475]: DEBUG nova.policy [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11c9c75b1984423f860daec9827e7ce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67d27343d8c04fc9a2bed7a764f6cf82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.506734] env[69475]: INFO nova.compute.manager [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Rescuing [ 1035.507034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.509120] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.509120] env[69475]: DEBUG nova.network.neutron [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1035.650729] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c25e6c-7438-210e-11a9-b471f97d57e5, 'name': SearchDatastore_Task, 'duration_secs': 0.03346} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.651476] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.652298] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1035.652705] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.653284] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.654073] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1035.654073] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69582714-08d2-4bb5-82a2-b91c0c8550cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.663463] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1035.663945] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1035.665030] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7de16ea2-7f96-4d06-a3ae-585db901109d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.670529] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1035.670529] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a86d8f-abb4-324f-6a75-7700421696da" [ 1035.670529] env[69475]: _type = "Task" [ 1035.670529] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.681262] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a86d8f-abb4-324f-6a75-7700421696da, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.834878] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1035.860522] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508685, 'name': Rename_Task, 'duration_secs': 1.02366} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.860522] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1035.860522] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d0e64ccd-8159-4a5f-a29b-96876675d836 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.867144] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1035.867144] env[69475]: value = "task-3508687" [ 1035.867144] env[69475]: _type = "Task" [ 1035.867144] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.886143] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508687, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.917669] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508686, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.984610] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1036.180933] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a86d8f-abb4-324f-6a75-7700421696da, 'name': SearchDatastore_Task, 'duration_secs': 0.00979} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.189020] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b8db6b9-819c-425d-8a2c-9d7284766ee8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.193939] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1036.193939] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261891d-67b2-136b-a039-e855f1e383b0" [ 1036.193939] env[69475]: _type = "Task" [ 1036.193939] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.202455] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261891d-67b2-136b-a039-e855f1e383b0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.231118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac2c6d12-2225-4e02-8982-37713efe28cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.239760] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b8d64ab-cbfe-4a54-bebe-9e98b14333f9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.273917] env[69475]: DEBUG nova.network.neutron [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Updating instance_info_cache with network_info: [{"id": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "address": "fa:16:3e:52:a2:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24efd80f-72", "ovs_interfaceid": "24efd80f-72cd-4c40-962a-103b1ca55a1f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.278120] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f14974-a52a-4d07-a8db-78b6f1b27162 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.283923] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57b9f336-3d31-4005-9739-e1a87cd68a98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.299729] env[69475]: DEBUG nova.compute.provider_tree [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1036.380660] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508687, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.416408] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508686, 'name': CreateVM_Task, 'duration_secs': 0.609364} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.416589] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1036.417377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.417699] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.418207] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1036.418290] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e2392664-4126-4a7e-a58a-d2b57508c79d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.422618] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1036.422618] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d4e76c-fa02-ef42-6f16-fcc1b07af5d9" [ 1036.422618] env[69475]: _type = "Task" [ 1036.422618] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.430148] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d4e76c-fa02-ef42-6f16-fcc1b07af5d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.477254] env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Successfully created port: 316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1036.679929] env[69475]: DEBUG nova.network.neutron [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.704269] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5261891d-67b2-136b-a039-e855f1e383b0, 'name': SearchDatastore_Task, 'duration_secs': 0.01081} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.704562] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.704803] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35/1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1036.705072] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0f7d85f-dee7-4a27-96d4-f19e2f3aab34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.712405] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1036.712405] env[69475]: value = "task-3508688" [ 1036.712405] env[69475]: _type = "Task" [ 1036.712405] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.720195] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508688, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.778980] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-f8a82046-4589-45d2-a7a3-466fe4d8f9c6" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.803278] env[69475]: DEBUG nova.scheduler.client.report [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1036.850199] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1036.876239] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1036.876528] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1036.876696] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1036.876896] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor pref 0:0:0 
{{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1036.877073] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1036.877314] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1036.877463] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1036.877636] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1036.877825] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1036.877991] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1036.878205] env[69475]: DEBUG nova.virt.hardware [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1036.879042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2d67d5-5937-4c27-8666-b318ff16a89d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.882415] env[69475]: DEBUG nova.network.neutron [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Updated VIF entry in instance network info cache for port cf56fb83-6fba-4e69-9e72-3cd7f5dd266c. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.882644] env[69475]: DEBUG nova.network.neutron [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Updating instance_info_cache with network_info: [{"id": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "address": "fa:16:3e:60:d6:87", "network": {"id": "24115ce5-3e3f-4419-9144-03ad84d7f63b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-440832883-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1073981d0d7740e78805798e02ff9d55", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e23c1d18-c841-49ea-95f3-df5ceac28afd", "external-id": "nsx-vlan-transportzone-774", "segmentation_id": 774, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcf56fb83-6f", "ovs_interfaceid": "cf56fb83-6fba-4e69-9e72-3cd7f5dd266c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.889181] env[69475]: DEBUG oslo_vmware.api [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508687, 'name': PowerOnVM_Task, 'duration_secs': 0.530128} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.891422] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1036.891658] env[69475]: DEBUG nova.compute.manager [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1036.892510] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567156bc-6df6-42bf-b50e-656a6b504dd0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.896123] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d63596-196a-43c2-8d00-083681fa4ccf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.932554] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d4e76c-fa02-ef42-6f16-fcc1b07af5d9, 'name': SearchDatastore_Task, 'duration_secs': 0.035082} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.932846] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.933114] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1036.934020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.934020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1036.934020] env[69475]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1036.934185] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4c61c0be-8cb6-496a-bbfb-35c9d6689a7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.942986] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1036.943227] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1036.944761] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f282717-66d2-4fa3-a638-d63ec7ee4229 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.950802] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1036.950802] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52735b26-26fa-9a21-0cd6-2c442bd0b9ff" [ 1036.950802] env[69475]: _type = "Task" [ 1036.950802] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.961578] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52735b26-26fa-9a21-0cd6-2c442bd0b9ff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.183349] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.183661] env[69475]: DEBUG nova.objects.instance [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'migration_context' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.228227] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508688, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.310532] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.486s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1037.312944] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1037.314333] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.433s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.314483] env[69475]: DEBUG nova.objects.instance [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'resources' on Instance uuid 8963b50c-29ca-49fd-8289-1e1b7583ca25 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.385771] env[69475]: DEBUG oslo_concurrency.lockutils [req-b86a77b7-8b5f-4705-bb05-ecb43943b7d5 req-193d5655-7ec2-465d-b8d1-a136918bcb94 service nova] Releasing lock "refresh_cache-24ef554b-30bf-4e28-856e-98eb7ec2618b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.425320] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.462433] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52735b26-26fa-9a21-0cd6-2c442bd0b9ff, 'name': SearchDatastore_Task, 'duration_secs': 0.077601} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.463355] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbe13fb2-e539-4eeb-b4b9-677e3d768d5b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.468968] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1037.468968] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270a0ee-cee6-9012-f99d-59d6217815ce" [ 1037.468968] env[69475]: _type = "Task" [ 1037.468968] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.476656] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270a0ee-cee6-9012-f99d-59d6217815ce, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.692022] env[69475]: DEBUG nova.objects.base [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Object Instance<82236043-3222-4134-8717-4c239ed12aba> lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1037.692022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7600c53-06a7-44e0-b852-0b017b5453bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.710307] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60361d5c-ddad-4bce-9a89-5e254801a750 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.719267] env[69475]: DEBUG oslo_vmware.api [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1037.719267] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5250f650-81a5-e95a-1f93-f5ad19253956" [ 1037.719267] env[69475]: _type = "Task" [ 1037.719267] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.726556] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508688, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.579108} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.727266] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35/1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1037.727514] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1037.727791] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-219287f5-7706-45aa-b90f-a362182f0c09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.732987] env[69475]: DEBUG oslo_vmware.api [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5250f650-81a5-e95a-1f93-f5ad19253956, 'name': SearchDatastore_Task, 'duration_secs': 0.010762} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.733649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.737828] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1037.737828] env[69475]: value = "task-3508689" [ 1037.737828] env[69475]: _type = "Task" [ 1037.737828] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.745756] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508689, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.817476] env[69475]: DEBUG nova.compute.utils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1037.822169] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1037.822378] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1037.866067] env[69475]: DEBUG nova.policy [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba09f56e4fda4fc99602796a0af6cb33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87670cfd2b848af98507a5ebf9fab51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1037.869151] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.869394] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.869567] env[69475]: INFO nova.compute.manager [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Shelving [ 1037.982289] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5270a0ee-cee6-9012-f99d-59d6217815ce, 'name': SearchDatastore_Task, 'duration_secs': 0.088696} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.982564] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.982829] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 24ef554b-30bf-4e28-856e-98eb7ec2618b/24ef554b-30bf-4e28-856e-98eb7ec2618b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1037.983104] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dc239d6-003a-4adc-8448-24a13efb2094 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.990264] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1037.990264] env[69475]: value = "task-3508690" [ 1037.990264] env[69475]: _type = "Task" [ 1037.990264] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.002694] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508690, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.147674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffdd7fe8-5146-495c-a664-f7b752ded67c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.155037] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebadd724-e327-4b9d-af56-d08f0320e8b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.188455] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92aba1bd-c11d-4f44-a38d-2518ac05bf7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.198612] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d410ee1-bfdb-48e5-8d87-2aa505055512 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.216342] env[69475]: DEBUG nova.compute.provider_tree [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.247079] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508689, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.265902} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.247851] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Successfully created port: fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1038.249724] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1038.250507] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b021dc82-d70b-46fe-b3ef-3e5e91e203f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.273304] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35/1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1038.273515] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f7ae60c-8ac2-405c-be05-76537e7aca56 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.294155] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1038.294155] env[69475]: value = "task-3508691" [ 1038.294155] env[69475]: _type = "Task" [ 1038.294155] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.303076] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508691, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.322551] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1038.331084] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.331296] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c6afdfe-d77b-4b72-ac62-61a5775652a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.338172] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1038.338172] env[69475]: value = "task-3508692" [ 1038.338172] env[69475]: _type = "Task" [ 1038.338172] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.347088] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508692, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.502371] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508690, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.551770] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.552079] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.552339] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "44bcaa36-ecd9-448b-b589-7c32066ede1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1038.552537] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1038.552709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.555186] env[69475]: INFO nova.compute.manager [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Terminating instance [ 1038.719761] env[69475]: DEBUG nova.scheduler.client.report [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.806398] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba 
tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508691, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.848680] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508692, 'name': PowerOffVM_Task, 'duration_secs': 0.229802} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.848963] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1038.849777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962802ff-74ec-43af-919b-35045d82989c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.872281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce01d7d4-4dd3-4380-9dce-71a4a8b1c54f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.878813] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.882182] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3aa54018-8f8c-411f-b48b-b786c61f9b22 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.891770] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1038.891770] env[69475]: value = "task-3508693" [ 1038.891770] env[69475]: _type = "Task" [ 1038.891770] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.902288] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508693, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.924362] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.924568] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c146dba1-1850-42dc-b2e2-3baaf1a58e1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.931462] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1038.931462] env[69475]: value = "task-3508694" [ 1038.931462] env[69475]: _type = "Task" [ 1038.931462] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.942554] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1038.942554] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.942554] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.942554] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.942883] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.943039] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b28d7a53-0a04-4667-907a-e222fc1fdb8e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.949552] 
env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Successfully updated port: 316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1038.954716] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.954716] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.956050] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d833bda1-2e8c-4671-97d8-41720eac6012 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.961727] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1038.961727] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ebf793-ad12-b182-c2d1-32055d8093f5" [ 1038.961727] env[69475]: _type = "Task" [ 1038.961727] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.972165] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ebf793-ad12-b182-c2d1-32055d8093f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.004027] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508690, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.820263} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.004027] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 24ef554b-30bf-4e28-856e-98eb7ec2618b/24ef554b-30bf-4e28-856e-98eb7ec2618b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1039.004027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1039.004027] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0495156a-0212-49bb-a7ca-dca7f6fa7d06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.008165] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1039.008165] env[69475]: value = "task-3508695" [ 1039.008165] env[69475]: _type = "Task" [ 1039.008165] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.016196] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508695, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.059335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "refresh_cache-44bcaa36-ecd9-448b-b589-7c32066ede1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.059530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquired lock "refresh_cache-44bcaa36-ecd9-448b-b589-7c32066ede1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.059709] env[69475]: DEBUG nova.network.neutron [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.067252] env[69475]: DEBUG nova.compute.manager [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Received event network-vif-plugged-316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1039.067252] env[69475]: DEBUG oslo_concurrency.lockutils [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] Acquiring lock "20b37e69-5870-4f63-aeba-9293615da478-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.067252] env[69475]: DEBUG oslo_concurrency.lockutils [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] Lock "20b37e69-5870-4f63-aeba-9293615da478-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.067427] env[69475]: DEBUG oslo_concurrency.lockutils [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] Lock "20b37e69-5870-4f63-aeba-9293615da478-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.067550] env[69475]: DEBUG nova.compute.manager [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] No waiting events found dispatching network-vif-plugged-316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1039.067748] env[69475]: WARNING nova.compute.manager [req-c107fb3f-9ee5-40df-9fa1-f4eaf05847b9 req-5d750e0e-a76b-4f92-a039-fca661687957 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Received unexpected event network-vif-plugged-316b2c71-6909-4d98-a09c-c3c58878a1ed for instance with vm_state building and task_state spawning. 
[ 1039.226017] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.912s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.228695] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.331s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.230669] env[69475]: INFO nova.compute.claims [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1039.256283] env[69475]: INFO nova.scheduler.client.report [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance 8963b50c-29ca-49fd-8289-1e1b7583ca25 [ 1039.307108] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508691, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.333711] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1039.357032] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1039.357874] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1039.357874] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1039.357874] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1039.357874] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1039.357874] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1039.358256] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1039.358354] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1039.358532] env[69475]: DEBUG 
nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1039.358699] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1039.358875] env[69475]: DEBUG nova.virt.hardware [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1039.359784] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779fece7-0bf1-4234-8987-671e8eca3226 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.367923] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46e3887d-1d04-4504-b9af-3a19d0826f7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.400820] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508693, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.457329] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.457531] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.457695] env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.472903] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ebf793-ad12-b182-c2d1-32055d8093f5, 'name': SearchDatastore_Task, 'duration_secs': 0.010258} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.473702] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-359131e3-e2a1-4831-9a26-e580eb3d0e7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.478811] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1039.478811] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439c16-6073-bfe8-b5d6-5c597c773350" [ 1039.478811] env[69475]: _type = "Task" [ 1039.478811] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.486416] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439c16-6073-bfe8-b5d6-5c597c773350, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.516810] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508695, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066559} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.517063] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1039.517790] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c4ff11-a767-4b7d-ae8b-fdb7a57b6fbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.539846] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 24ef554b-30bf-4e28-856e-98eb7ec2618b/24ef554b-30bf-4e28-856e-98eb7ec2618b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.540376] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e24f920e-e438-442c-aa09-0d06d9450079 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.560367] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1039.560367] env[69475]: value = "task-3508696" [ 1039.560367] env[69475]: _type = "Task" [ 
1039.560367] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.569937] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508696, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.584766] env[69475]: DEBUG nova.network.neutron [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.667333] env[69475]: DEBUG nova.network.neutron [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1039.764606] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e7c087f2-2cea-4ea1-bec2-a1e2e1118319 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "8963b50c-29ca-49fd-8289-1e1b7583ca25" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.094s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.805721] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508691, 'name': ReconfigVM_Task, 'duration_secs': 1.359039} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.806083] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35/1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1039.806824] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f797512f-2c89-4c83-9921-884ba196835b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.812844] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1039.812844] env[69475]: value = "task-3508697" [ 1039.812844] env[69475]: _type = "Task" [ 1039.812844] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.822281] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508697, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.902712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "97013703-3506-4441-b80c-cbb5c7e29bdf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.902951] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.908558] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508693, 'name': PowerOffVM_Task, 'duration_secs': 0.90342} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.908805] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.909945] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c625ed79-ab9e-45bb-9593-fd198b57ae28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.936680] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8638c41-ddce-473c-acd9-72054043cb77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.990115] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439c16-6073-bfe8-b5d6-5c597c773350, 'name': SearchDatastore_Task, 'duration_secs': 0.080323} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.990115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1039.990235] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. {{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1039.990433] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-60f7b38b-4030-4e4c-87d2-7043dadde0e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.002780] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1040.002780] env[69475]: value = "task-3508698" [ 1040.002780] env[69475]: _type = "Task" [ 1040.002780] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.010311] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508698, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.019271] env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1040.071357] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508696, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.170093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Releasing lock "refresh_cache-44bcaa36-ecd9-448b-b589-7c32066ede1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.170514] env[69475]: DEBUG nova.compute.manager [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1040.170718] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1040.171655] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b417cfe-d262-4c57-b779-bd5904eda14e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.179382] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1040.179700] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1c20c28-78b0-4e64-b54c-ad1e280cdaff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.185865] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1040.185865] env[69475]: value = "task-3508699" [ 1040.185865] env[69475]: _type = "Task" [ 1040.185865] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.193638] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508699, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.323827] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508697, 'name': Rename_Task, 'duration_secs': 0.135387} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.324245] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1040.324507] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a0b6038-d3a2-4594-9438-105963acc7fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.333897] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1040.333897] env[69475]: value = "task-3508700" [ 1040.333897] env[69475]: _type = "Task" [ 1040.333897] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.342173] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508700, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.376702] env[69475]: DEBUG nova.network.neutron [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updating instance_info_cache with network_info: [{"id": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "address": "fa:16:3e:d3:e8:59", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316b2c71-69", "ovs_interfaceid": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.405673] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1040.451817] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1040.451817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-8d669517-d7dc-47e4-b5f4-35cd06f6bdf2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.455239] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Successfully updated port: fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1040.466590] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1040.466590] env[69475]: value = "task-3508701" [ 1040.466590] env[69475]: _type = "Task" [ 1040.466590] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.476763] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508701, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.513960] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508698, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.575218] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508696, 'name': ReconfigVM_Task, 'duration_secs': 0.884298} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.576616] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 24ef554b-30bf-4e28-856e-98eb7ec2618b/24ef554b-30bf-4e28-856e-98eb7ec2618b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.577995] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e39fd0f-2f06-431f-a8ca-1574518c632f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.580890] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb4688d5-2a21-454d-a337-dc98916aedef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.589615] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e53fd0e7-86e0-4b41-a41d-7c4b6798f69f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.594456] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1040.594456] env[69475]: value = "task-3508702" [ 1040.594456] env[69475]: _type = "Task" [ 1040.594456] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.627636] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb6d99a3-3c3e-4518-9a47-e12f34c6ce1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.635608] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508702, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.644206] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f3c2af-6700-4a54-8af8-5557dbbf33dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.661670] env[69475]: DEBUG nova.compute.provider_tree [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.696222] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508699, 'name': PowerOffVM_Task, 'duration_secs': 0.225542} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.696323] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1040.697951] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1040.697951] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66aeb71b-ea29-4f4b-a419-bf2b7c20f13f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.724807] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1040.725072] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1040.725270] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Deleting the datastore file [datastore1] 44bcaa36-ecd9-448b-b589-7c32066ede1d {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1040.725609] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4d6f87bd-161c-40cd-9146-5e6779748e7b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.742295] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for the task: (returnval){ [ 1040.742295] env[69475]: value = "task-3508704" [ 1040.742295] env[69475]: _type = "Task" [ 1040.742295] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.749630] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508704, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.844109] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508700, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.880120] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.880478] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Instance network_info: |[{"id": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "address": "fa:16:3e:d3:e8:59", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316b2c71-69", "ovs_interfaceid": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1040.880998] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:e8:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '316b2c71-6909-4d98-a09c-c3c58878a1ed', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.890050] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.890050] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.890050] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4793680d-280a-4008-b602-1a20973dc93b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.914180] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.914180] env[69475]: value = "task-3508705" [ 1040.914180] env[69475]: _type = "Task" [ 1040.914180] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.924058] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508705, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.926878] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.958571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.958735] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.958844] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1040.977879] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508701, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.013830] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508698, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.783221} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.014190] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. [ 1041.015058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-360dcef1-82a0-46e1-a6d8-002c0789f458 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.040233] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1041.040916] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16611035-9a3c-4cbe-a996-4f12e65d9a5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.059150] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1041.059150] env[69475]: value = "task-3508706" [ 1041.059150] env[69475]: _type = "Task" [ 1041.059150] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.069819] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508706, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.104472] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508702, 'name': Rename_Task, 'duration_secs': 0.284853} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.104773] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.105395] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ac948289-ecea-4020-8de9-8ff423930dfa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.112035] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1041.112035] env[69475]: value = "task-3508707" [ 1041.112035] env[69475]: _type = "Task" [ 1041.112035] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.119972] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508707, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.164481] env[69475]: DEBUG nova.scheduler.client.report [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1041.195212] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Received event network-changed-316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1041.195481] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Refreshing instance network info cache due to event network-changed-316b2c71-6909-4d98-a09c-c3c58878a1ed. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1041.195733] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Acquiring lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.195892] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Acquired lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.196076] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Refreshing network info cache for port 316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1041.250043] env[69475]: DEBUG oslo_vmware.api [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Task: {'id': task-3508704, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12474} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.250043] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1041.250043] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1041.250043] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1041.250043] env[69475]: INFO nova.compute.manager [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1041.250043] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1041.250576] env[69475]: DEBUG nova.compute.manager [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1041.250576] env[69475]: DEBUG nova.network.neutron [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1041.273372] env[69475]: DEBUG nova.network.neutron [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.294044] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "226afd68-34d8-482e-89f9-0c45a300a803" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.294980] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.350107] env[69475]: DEBUG oslo_vmware.api [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508700, 'name': PowerOnVM_Task, 'duration_secs': 0.60342} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.350490] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1041.350627] env[69475]: INFO nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Took 11.96 seconds to spawn the instance on the hypervisor. 
[ 1041.350823] env[69475]: DEBUG nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1041.352029] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e7cf589-3ede-4bdb-8e74-0b527c5c4b52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.426160] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508705, 'name': CreateVM_Task, 'duration_secs': 0.340053} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.426367] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.427090] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.427273] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.427628] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.428230] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-717166be-d95b-4e10-94fa-932284bf68a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.433191] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1041.433191] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5202cb50-d192-d794-82d6-4d383944ba91" [ 1041.433191] env[69475]: _type = "Task" [ 1041.433191] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.440805] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5202cb50-d192-d794-82d6-4d383944ba91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.483026] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508701, 'name': CreateSnapshot_Task, 'duration_secs': 0.655943} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.483026] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1041.484288] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0861a0fe-8754-431b-8939-47f83cf5483b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.508274] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1041.571418] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508706, 'name': ReconfigVM_Task, 'duration_secs': 0.303632} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.571731] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Reconfigured VM instance instance-0000005e to attach disk [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.572606] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e9a203-c55b-4e3b-a06b-0f93527f7362 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.607393] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2a62bc1-6430-4797-9f60-a270349235fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.628292] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508707, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.630012] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1041.630012] env[69475]: value = "task-3508708" [ 1041.630012] env[69475]: _type = "Task" [ 1041.630012] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.638957] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508708, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.673617] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.445s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.674188] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1041.677072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.611s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.678572] env[69475]: INFO nova.compute.claims [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.777287] env[69475]: DEBUG nova.network.neutron [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.787289] env[69475]: DEBUG nova.network.neutron [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating instance_info_cache with network_info: [{"id": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "address": "fa:16:3e:dc:db:e1", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbde5d12-53", "ovs_interfaceid": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.797380] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1041.871607] env[69475]: INFO nova.compute.manager [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Took 37.64 seconds to build instance. [ 1041.946019] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5202cb50-d192-d794-82d6-4d383944ba91, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.946019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.946019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1041.946019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.946019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.946363] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1041.946363] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3457bda3-5d68-4b6b-9d5e-936e7326e3b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.956021] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1041.956021] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1041.956184] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07b60915-1cbd-4722-8d88-75a87c93e074 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.961198] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1041.961198] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cbd4f6-2bf8-db9b-dced-0d478e696c27" [ 1041.961198] env[69475]: _type = "Task" [ 1041.961198] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.969096] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cbd4f6-2bf8-db9b-dced-0d478e696c27, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.006398] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1042.006730] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-25ff7000-83d4-45ef-bc2a-71d4f2b5b3f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.015101] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1042.015101] env[69475]: value = "task-3508709" [ 1042.015101] env[69475]: _type = "Task" [ 1042.015101] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.026251] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508709, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.129175] env[69475]: DEBUG oslo_vmware.api [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508707, 'name': PowerOnVM_Task, 'duration_secs': 0.529838} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.129446] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.129650] env[69475]: INFO nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Took 10.31 seconds to spawn the instance on the hypervisor. 
[ 1042.129826] env[69475]: DEBUG nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1042.130638] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20586a61-fc3e-4f90-93fb-93bcb5243c57 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.147204] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508708, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.183526] env[69475]: DEBUG nova.compute.utils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1042.188297] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1042.188500] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1042.280542] env[69475]: INFO nova.compute.manager [-] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Took 1.03 seconds to deallocate network for instance. 
[ 1042.289563] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.289970] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance network_info: |[{"id": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "address": "fa:16:3e:dc:db:e1", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbde5d12-53", "ovs_interfaceid": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1042.290442] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:db:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fbde5d12-5376-4f30-a0eb-1e63c7d36242', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1042.298734] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1042.298961] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1042.299211] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-104bb78e-90ad-44e5-b6c2-856274f66e3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.322886] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1042.322886] env[69475]: value = "task-3508710" [ 1042.322886] env[69475]: _type = "Task" [ 1042.322886] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.332703] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508710, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.337715] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.373342] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updated VIF entry in instance network info cache for port 316b2c71-6909-4d98-a09c-c3c58878a1ed. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1042.373809] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updating instance_info_cache with network_info: [{"id": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "address": "fa:16:3e:d3:e8:59", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316b2c71-69", "ovs_interfaceid": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1042.375292] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c4fcf92b-e7a3-461a-aca4-1ca32163afba tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 39.159s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.404524] env[69475]: DEBUG nova.policy [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1042.471468] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cbd4f6-2bf8-db9b-dced-0d478e696c27, 'name': SearchDatastore_Task, 'duration_secs': 0.045745} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.472339] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0daf8e1d-2a9d-4b5e-b95b-6777d7425d51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.479830] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1042.479830] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528b51ee-2871-8145-a2ab-64e78eb71eda" [ 1042.479830] env[69475]: _type = "Task" [ 1042.479830] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.486426] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528b51ee-2871-8145-a2ab-64e78eb71eda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.526773] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508709, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.661576] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508708, 'name': ReconfigVM_Task, 'duration_secs': 0.587263} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.662159] env[69475]: INFO nova.compute.manager [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Took 36.28 seconds to build instance. [ 1042.663190] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1042.663786] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4bde45cb-c045-4295-9888-b1021630c9ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.671147] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1042.671147] env[69475]: value = "task-3508711" [ 1042.671147] env[69475]: _type = "Task" [ 1042.671147] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.681275] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508711, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.688408] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1042.788755] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.838145] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508710, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.876722] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Releasing lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.877215] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Received event network-vif-plugged-fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.877379] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.877720] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.879256] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.879256] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 
4f091501-351c-45b8-9f64-4d28d4623df8] No waiting events found dispatching network-vif-plugged-fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.879256] env[69475]: WARNING nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Received unexpected event network-vif-plugged-fbde5d12-5376-4f30-a0eb-1e63c7d36242 for instance with vm_state building and task_state spawning. [ 1042.879256] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Received event network-changed-fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.879256] env[69475]: DEBUG nova.compute.manager [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Refreshing instance network info cache due to event network-changed-fbde5d12-5376-4f30-a0eb-1e63c7d36242. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1042.879256] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Acquiring lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.879729] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Acquired lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.879729] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Refreshing network info cache for port fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.994145] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528b51ee-2871-8145-a2ab-64e78eb71eda, 'name': SearchDatastore_Task, 'duration_secs': 0.016366} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.994654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.994960] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 20b37e69-5870-4f63-aeba-9293615da478/20b37e69-5870-4f63-aeba-9293615da478.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1042.995261] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-53228e8b-df43-4785-867b-80aa2753a10b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.002591] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1043.002591] env[69475]: value = "task-3508712" [ 1043.002591] env[69475]: _type = "Task" [ 1043.002591] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.016769] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508712, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.026484] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508709, 'name': CloneVM_Task} progress is 94%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.032523] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Successfully created port: f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1043.077570] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b9a235-09ea-45f9-9a12-17d6f1035cd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.085170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a40a530-361f-4e7d-a246-4092159420b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.128861] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d171626-8a3e-4a6c-9efb-43e8ff01e678 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.136774] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9381b9e8-e53f-43da-86a6-fc9239844b4e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.152755] env[69475]: DEBUG nova.compute.provider_tree [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1043.164223] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a1cffc81-8684-49ee-b0a7-378712aae884 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.794s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.181090] env[69475]: DEBUG oslo_vmware.api [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508711, 'name': PowerOnVM_Task, 'duration_secs': 0.436312} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.181352] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1043.186101] env[69475]: DEBUG nova.compute.manager [None req-60fb059a-0d3f-4a28-ba98-0487bf39ba85 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1043.186893] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c18526d4-8313-4fa8-af07-0380f1d9359a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.191977] env[69475]: DEBUG nova.compute.manager [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-changed-9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1043.192267] env[69475]: DEBUG nova.compute.manager [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing instance network info cache due to event network-changed-9d51ee71-8419-4657-9a34-44bec2faf3c2. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1043.192462] env[69475]: DEBUG oslo_concurrency.lockutils [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.192622] env[69475]: DEBUG oslo_concurrency.lockutils [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.193142] env[69475]: DEBUG nova.network.neutron [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing network info cache for port 9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1043.338555] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508710, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.513479] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508712, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.526412] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508709, 'name': CloneVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.660874] env[69475]: DEBUG nova.scheduler.client.report [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.706612] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1043.751368] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1043.751547] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1043.751889] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1043.751969] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1043.752169] env[69475]: DEBUG 
nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1043.752328] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1043.752547] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1043.752730] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1043.752919] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1043.753103] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1043.753323] env[69475]: DEBUG nova.virt.hardware [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1043.754573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8593fe-89b4-4201-9adf-bfa07b6f27db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.765463] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0988c6b1-ca5e-4601-8259-091a9b312a60 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.785626] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updated VIF entry in instance network info cache for port fbde5d12-5376-4f30-a0eb-1e63c7d36242. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.786910] env[69475]: DEBUG nova.network.neutron [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating instance_info_cache with network_info: [{"id": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "address": "fa:16:3e:dc:db:e1", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbde5d12-53", "ovs_interfaceid": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.840059] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508710, 'name': CreateVM_Task, 'duration_secs': 1.443956} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.840059] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1043.840507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.840665] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.843578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1043.843578] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c2aadd6-9aa0-4289-ba52-0edf1aeb8fa9 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.846789] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1043.846789] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525dabd6-ae8a-b2c8-dca6-fd90778fb1d4" [ 1043.846789] env[69475]: _type = "Task" [ 1043.846789] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.855912] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525dabd6-ae8a-b2c8-dca6-fd90778fb1d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.996176] env[69475]: DEBUG nova.network.neutron [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updated VIF entry in instance network info cache for port 9d51ee71-8419-4657-9a34-44bec2faf3c2. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1043.996756] env[69475]: DEBUG nova.network.neutron [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.013634] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508712, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.029338] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508709, 'name': CloneVM_Task, 'duration_secs': 1.556534} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.029628] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Created linked-clone VM from snapshot [ 1044.030545] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af6b134-9002-484f-8b94-1dee662c46db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.039438] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Uploading image a9351d13-720c-49e6-a8e9-3fac7da2b98a {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1044.076059] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1044.076059] env[69475]: value = "vm-701094" [ 1044.076059] env[69475]: _type = "VirtualMachine" [ 1044.076059] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1044.076402] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-4895e6d8-acd9-4430-9dd8-feeb204d7085 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.085099] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lease: (returnval){ [ 1044.085099] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52580cc4-3d00-b202-e579-a3fd904ceeb5" [ 1044.085099] env[69475]: _type = "HttpNfcLease" [ 1044.085099] env[69475]: } obtained for exporting VM: (result){ [ 1044.085099] env[69475]: value = "vm-701094" [ 1044.085099] env[69475]: _type = "VirtualMachine" [ 1044.085099] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1044.085513] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the lease: (returnval){ [ 1044.085513] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52580cc4-3d00-b202-e579-a3fd904ceeb5" [ 1044.085513] env[69475]: _type = "HttpNfcLease" [ 1044.085513] env[69475]: } to be ready. 
{{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1044.093279] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1044.093279] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52580cc4-3d00-b202-e579-a3fd904ceeb5" [ 1044.093279] env[69475]: _type = "HttpNfcLease" [ 1044.093279] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1044.167652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.168408] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1044.170788] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.186s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.172594] env[69475]: INFO nova.compute.claims [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1044.294152] env[69475]: DEBUG oslo_concurrency.lockutils [req-b47d6fc1-2a17-47ca-9d1e-84a1d8d36058 req-1473284a-232f-4b9d-b0ce-1d29ce8d9d30 service nova] Releasing lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.358267] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525dabd6-ae8a-b2c8-dca6-fd90778fb1d4, 'name': SearchDatastore_Task, 'duration_secs': 0.062619} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.358596] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.358836] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1044.359257] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.359414] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1044.359599] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1044.359904] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a6c85e88-3687-401a-9d75-ee7e3c85f0ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.377368] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1044.377513] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1044.378268] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30f6bbcd-9f7d-4705-b89f-59d5ae4d824b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.383689] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1044.383689] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52975dec-a3c7-266e-3d1f-52cd1163d3d7" [ 1044.383689] env[69475]: _type = "Task" [ 1044.383689] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.391693] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52975dec-a3c7-266e-3d1f-52cd1163d3d7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.503020] env[69475]: DEBUG oslo_concurrency.lockutils [req-d344e433-092d-4635-b390-0c1ae798f65d req-0fa60197-33dd-4a14-8fd8-5c512aae78ff service nova] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.513975] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508712, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.14456} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.514256] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 20b37e69-5870-4f63-aeba-9293615da478/20b37e69-5870-4f63-aeba-9293615da478.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1044.514472] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1044.514764] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a1232a7-24bd-4035-bb35-b1a375defaa7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.525021] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1044.525021] env[69475]: value = "task-3508714" [ 1044.525021] env[69475]: _type = "Task" [ 1044.525021] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.530945] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508714, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.596637] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1044.596637] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52580cc4-3d00-b202-e579-a3fd904ceeb5" [ 1044.596637] env[69475]: _type = "HttpNfcLease" [ 1044.596637] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1044.596945] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1044.596945] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52580cc4-3d00-b202-e579-a3fd904ceeb5" [ 1044.596945] env[69475]: _type = "HttpNfcLease" [ 1044.596945] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1044.597788] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baae318e-f070-4f8b-a895-db4d0a4c3bf5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.606491] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1044.606807] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1044.679518] env[69475]: DEBUG nova.compute.utils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1044.684052] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1044.684359] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1044.694400] env[69475]: DEBUG nova.compute.manager [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Received event network-vif-plugged-f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.694400] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] Acquiring lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.694400] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.695198] env[69475]: DEBUG oslo_concurrency.lockutils [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.696100] env[69475]: DEBUG nova.compute.manager [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] No waiting events found dispatching network-vif-plugged-f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1044.696100] env[69475]: WARNING nova.compute.manager [req-4ec63d00-3b22-4f6b-bca6-d9b48b646ea7 req-050c4aac-a87f-4102-b62e-911c20042a4b service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Received unexpected event network-vif-plugged-f26ed400-5630-4899-b5dd-a9af4540d3d7 for instance with vm_state building and task_state spawning. 
[ 1044.755478] env[69475]: DEBUG nova.policy [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e067d26583734092863fe6198f686cf6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0b78b5a681da4ce1a3ed403bccdf88b7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1044.814568] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3690bd8d-b08d-456b-ac56-80b023f00d62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.846418] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Successfully updated port: f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1044.894189] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52975dec-a3c7-266e-3d1f-52cd1163d3d7, 'name': SearchDatastore_Task, 'duration_secs': 0.053376} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.894960] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68bb664d-8535-45f6-85ed-9f742b2abfdc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.900356] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1044.900356] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e480ca-cf76-b8a9-89f8-e9b551dd1084" [ 1044.900356] env[69475]: _type = "Task" [ 1044.900356] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.908569] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e480ca-cf76-b8a9-89f8-e9b551dd1084, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.031253] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508714, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.1081} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.031610] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1045.032314] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18f6e416-7de4-4560-a740-9b924d0b68e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.054664] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] 20b37e69-5870-4f63-aeba-9293615da478/20b37e69-5870-4f63-aeba-9293615da478.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1045.056797] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64159ef2-5c40-4043-ad12-1460a4763b7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.077802] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1045.077802] env[69475]: value = "task-3508715" [ 1045.077802] env[69475]: _type = "Task" [ 1045.077802] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.087115] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508715, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.117238] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Successfully created port: d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1045.178777] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.179058] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.179276] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.179448] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.179614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1045.181902] env[69475]: INFO nova.compute.manager [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Terminating instance [ 1045.185118] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1045.351126] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.351404] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.351648] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1045.416432] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e480ca-cf76-b8a9-89f8-e9b551dd1084, 'name': SearchDatastore_Task, 'duration_secs': 0.046755} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.416843] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.417379] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1045.421018] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00164a38-0675-4635-b02f-84d9918b2e9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.426081] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1045.426081] env[69475]: value = "task-3508716" [ 1045.426081] env[69475]: _type = "Task" [ 1045.426081] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.437867] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508716, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.558688] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2abf1fb-9b17-4657-a7f3-59c9f95933cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.566888] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5072af-02ae-4ebd-bc25-a4f80ea35ea2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.608383] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cf4c930-00a1-4c8d-b279-98f2bc9d52b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.620505] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3637de6b-de21-42ae-84a3-c12505cfb290 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.625135] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508715, 'name': ReconfigVM_Task, 'duration_secs': 0.423283} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.625639] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Reconfigured VM instance instance-00000061 to attach disk [datastore2] 20b37e69-5870-4f63-aeba-9293615da478/20b37e69-5870-4f63-aeba-9293615da478.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.627013] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f050393-99eb-4c0e-926e-0c5c0c6aab14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.638219] env[69475]: DEBUG nova.compute.provider_tree [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1045.645909] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1045.645909] env[69475]: value = "task-3508717" [ 1045.645909] env[69475]: _type = "Task" [ 1045.645909] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.660311] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508717, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.686494] env[69475]: DEBUG nova.compute.manager [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1045.686666] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1045.688254] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ece202-17e8-49da-904b-dd132d426f94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.702224] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1045.702791] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6c261ba9-d971-47bd-aa76-ffa90ed56312 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.712483] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1045.712483] env[69475]: value = "task-3508718" [ 1045.712483] env[69475]: _type = "Task" [ 1045.712483] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.726139] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508718, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.902430] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1045.946888] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508716, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.142083] env[69475]: DEBUG nova.scheduler.client.report [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.156715] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508717, 'name': Rename_Task, 'duration_secs': 0.437633} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.157731] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.157731] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1356a7f-de21-485d-a79a-50191bd9115d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.164432] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1046.164432] env[69475]: value = "task-3508719" [ 1046.164432] env[69475]: _type = "Task" [ 1046.164432] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.174067] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508719, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.199334] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1046.233165] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508718, 'name': PowerOffVM_Task, 'duration_secs': 0.263706} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.234159] env[69475]: DEBUG nova.network.neutron [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Updating instance_info_cache with network_info: [{"id": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "address": "fa:16:3e:21:11:49", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf26ed400-56", "ovs_interfaceid": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.235716] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1046.235895] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1046.236388] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f37a3aa1-c092-4dbb-8e23-ce9d6530f7d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1046.242625] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1046.243100] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1046.243293] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1046.243471] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1046.243646] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f 
tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1046.246023] env[69475]: DEBUG nova.virt.hardware [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1046.246023] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8ab4f4-b6bd-432a-995a-6d0aff9589c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.255688] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416b9103-2af2-46bd-ad9f-c528d24b14c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.319076] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1046.319315] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1046.319560] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleting the datastore file [datastore1] 24ef554b-30bf-4e28-856e-98eb7ec2618b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1046.319851] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3a57c196-6279-4e8f-8ba9-29587e7e4553 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.326278] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for the task: (returnval){ [ 1046.326278] env[69475]: value = "task-3508721" [ 1046.326278] env[69475]: _type = "Task" [ 1046.326278] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.335733] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508721, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.371664] env[69475]: INFO nova.compute.manager [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Rescuing [ 1046.371862] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.372082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.372382] env[69475]: DEBUG nova.network.neutron [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1046.437398] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508716, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.598474} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.437723] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1046.437958] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1046.438242] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-298879a5-6a6a-4240-a851-becbadbd12e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.445274] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1046.445274] env[69475]: value = "task-3508722" [ 1046.445274] env[69475]: _type = "Task" [ 1046.445274] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.454219] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508722, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.647660] env[69475]: DEBUG nova.compute.manager [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Received event network-vif-plugged-d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.647893] env[69475]: DEBUG oslo_concurrency.lockutils [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] Acquiring lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.648177] env[69475]: DEBUG oslo_concurrency.lockutils [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.648366] env[69475]: DEBUG oslo_concurrency.lockutils [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.648718] env[69475]: DEBUG nova.compute.manager [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] No waiting events found dispatching network-vif-plugged-d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1046.648887] env[69475]: WARNING nova.compute.manager [req-63e79230-7fa3-4224-bc1e-61352032f745 req-2ac37d87-af01-43d1-96c5-0aa947a20b75 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Received unexpected event network-vif-plugged-d25c0e76-62cc-44b6-936c-43b7de37c528 for instance with vm_state building and task_state spawning. [ 1046.651231] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.651742] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1046.654438] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.229s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.654641] env[69475]: DEBUG nova.objects.instance [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1046.675866] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508719, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.721124] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Successfully updated port: d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1046.732090] env[69475]: DEBUG nova.compute.manager [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Received event network-changed-f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1046.732775] env[69475]: DEBUG nova.compute.manager [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Refreshing instance network info cache due to event network-changed-f26ed400-5630-4899-b5dd-a9af4540d3d7. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1046.732775] env[69475]: DEBUG oslo_concurrency.lockutils [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] Acquiring lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1046.736986] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.737881] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Instance network_info: |[{"id": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "address": "fa:16:3e:21:11:49", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf26ed400-56", "ovs_interfaceid": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1046.738135] env[69475]: DEBUG oslo_concurrency.lockutils [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] Acquired lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1046.738252] env[69475]: DEBUG nova.network.neutron [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Refreshing network info cache for port f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1046.739628] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:11:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f26ed400-5630-4899-b5dd-a9af4540d3d7', 'vif_model': 'vmxnet3'}] 
{{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1046.747345] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1046.751056] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1046.752063] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18ed463c-fb71-4048-8a29-ed0ee9541956 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.773253] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1046.773253] env[69475]: value = "task-3508723" [ 1046.773253] env[69475]: _type = "Task" [ 1046.773253] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.781037] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508723, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.838048] env[69475]: DEBUG oslo_vmware.api [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Task: {'id': task-3508721, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18566} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.838048] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1046.838048] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1046.838296] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1046.838471] env[69475]: INFO nova.compute.manager [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 1046.838682] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1046.841550] env[69475]: DEBUG nova.compute.manager [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1046.841720] env[69475]: DEBUG nova.network.neutron [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1046.963220] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508722, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06923} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.963733] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1046.965090] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c5867e-1b97-4235-8257-4e06454e2162 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.999837] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1047.001377] env[69475]: DEBUG nova.network.neutron [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Updated VIF entry in instance network info cache for port f26ed400-5630-4899-b5dd-a9af4540d3d7. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1047.001899] env[69475]: DEBUG nova.network.neutron [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Updating instance_info_cache with network_info: [{"id": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "address": "fa:16:3e:21:11:49", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf26ed400-56", "ovs_interfaceid": "f26ed400-5630-4899-b5dd-a9af4540d3d7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.003546] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e2d4167-7dc2-4b24-90c2-b3f2f9deda6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.038505] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1047.038505] env[69475]: value = "task-3508724" [ 1047.038505] env[69475]: _type = "Task" [ 1047.038505] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.050713] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508724, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.163405] env[69475]: DEBUG nova.compute.utils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1047.168917] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1047.169508] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1047.188666] env[69475]: DEBUG oslo_vmware.api [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508719, 'name': PowerOnVM_Task, 'duration_secs': 0.56476} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.189351] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.189904] env[69475]: INFO nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Took 10.34 seconds to spawn the instance on the hypervisor. [ 1047.191777] env[69475]: DEBUG nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.191777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3589ea6-3bdb-41bf-8d01-7ba774cbfe30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.224346] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.224522] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquired lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.224996] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1047.229293] env[69475]: DEBUG nova.policy [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 
'e30c3e1103cf49e98a81302370e11052', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4d19def3c6f64a21bfaa1e8451186234', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1047.287421] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508723, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.358776] env[69475]: DEBUG nova.network.neutron [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updating instance_info_cache with network_info: [{"id": "72e7aa25-953c-4253-8e6e-6543fd67af89", "address": "fa:16:3e:a2:e8:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e7aa25-95", "ovs_interfaceid": "72e7aa25-953c-4253-8e6e-6543fd67af89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.514674] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Successfully created port: 92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1047.528912] env[69475]: DEBUG oslo_concurrency.lockutils [req-3afac85a-75b0-4cfe-be3a-895fc65e4a51 req-d1968d80-d64e-467b-9149-9abca3b8ff7c service nova] Releasing lock "refresh_cache-ecf115fc-4ca1-41e2-ac42-82ec8154356e" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.553492] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508724, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.670083] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1047.676140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c23bb0a4-f34d-46a5-af1c-a40faaa6d99b tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.676140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 9.942s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1047.722023] env[69475]: INFO nova.compute.manager [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Took 27.13 seconds to build instance. [ 1047.754048] env[69475]: DEBUG nova.network.neutron [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1047.773477] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1047.789086] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508723, 'name': CreateVM_Task, 'duration_secs': 0.552877} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.789086] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1047.789086] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1047.789086] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1047.789086] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1047.789086] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31632f96-d7e2-489b-96dd-09290e8fb428 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.793353] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1047.793353] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52844f0a-09ee-783f-ef24-9db07908f91c" [ 1047.793353] env[69475]: _type = "Task" [ 1047.793353] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.803370] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52844f0a-09ee-783f-ef24-9db07908f91c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.863794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.018613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.018903] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.019145] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.019337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.019513] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.022167] env[69475]: INFO nova.compute.manager [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Terminating instance [ 1048.045985] env[69475]: DEBUG nova.network.neutron [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Updating instance_info_cache with network_info: [{"id": "d25c0e76-62cc-44b6-936c-43b7de37c528", "address": "fa:16:3e:38:25:d7", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": 
"192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25c0e76-62", "ovs_interfaceid": "d25c0e76-62cc-44b6-936c-43b7de37c528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1048.050619] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508724, 'name': ReconfigVM_Task, 'duration_secs': 0.534122} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.051079] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1048.051683] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-46f93af2-711d-40e5-97fa-561a6a341311 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.059412] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1048.059412] env[69475]: value = "task-3508725" [ 1048.059412] env[69475]: _type = "Task" [ 1048.059412] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.074194] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508725, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.224311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-35e8a4e1-99dc-4ff8-9f46-d11afc5f03ac tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.641s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.257174] env[69475]: INFO nova.compute.manager [-] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Took 1.42 seconds to deallocate network for instance. [ 1048.307394] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52844f0a-09ee-783f-ef24-9db07908f91c, 'name': SearchDatastore_Task, 'duration_secs': 0.043955} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.309987] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.310232] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1048.310464] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.310606] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.310784] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1048.311391] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9a5d1bc-a9c7-4a1e-bd27-45492e09dffa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.320957] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 
tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1048.321158] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1048.324291] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb1e6996-db0b-4fba-864d-a9a93de47650 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.330265] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1048.330265] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522fba67-81ba-df04-6b62-0dcd5360b5de" [ 1048.330265] env[69475]: _type = "Task" [ 1048.330265] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.339018] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522fba67-81ba-df04-6b62-0dcd5360b5de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.494233] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32be5715-ac5d-4555-b41f-55a11eb41b6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.501240] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2626b94d-4ca7-4946-8b2f-312deb1fbba7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.533494] env[69475]: DEBUG nova.compute.manager [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.533494] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.534637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180476a5-6063-45e0-b25e-6deda256cd0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.537876] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760f48e3-1882-4813-bf41-4951db3ca58e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.549345] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe23c00-1b06-4b42-9d5c-1617433c8e71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.553961] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.554855] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Releasing lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1048.555230] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Instance network_info: |[{"id": "d25c0e76-62cc-44b6-936c-43b7de37c528", "address": "fa:16:3e:38:25:d7", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25c0e76-62", "ovs_interfaceid": "d25c0e76-62cc-44b6-936c-43b7de37c528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1048.556357] env[69475]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c24e41ad-be41-4875-8d34-f009ad99fe2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.557588] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:25:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ea45c024-d603-4bac-9c1b-f302437ea4fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd25c0e76-62cc-44b6-936c-43b7de37c528', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1048.565417] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Creating folder: Project (0b78b5a681da4ce1a3ed403bccdf88b7). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1048.566281] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9aa419f-06f7-4311-a277-f539d51283c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.584180] env[69475]: DEBUG nova.compute.provider_tree [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1048.587054] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1048.587054] env[69475]: value = "task-3508726" [ 1048.587054] env[69475]: _type = "Task" [ 1048.587054] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.594969] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508725, 'name': Rename_Task, 'duration_secs': 0.31575} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.597128] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1048.597554] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Created folder: Project (0b78b5a681da4ce1a3ed403bccdf88b7) in parent group-v700823. 
[ 1048.597753] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Creating folder: Instances. Parent ref: group-v701097. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1048.598814] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f0840b4-fd04-4205-9e25-fc36a13c252f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.600387] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ab1c4a03-2898-4566-a1fa-8ba3608f259d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.606103] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508726, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.611898] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1048.611898] env[69475]: value = "task-3508728" [ 1048.611898] env[69475]: _type = "Task" [ 1048.611898] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.625135] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508728, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.625763] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Created folder: Instances in parent group-v701097. [ 1048.626021] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1048.626248] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1048.626486] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1aef5d64-19b7-4e41-ac1a-ccee5db4cab6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.650671] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1048.650671] env[69475]: value = "task-3508730" [ 1048.650671] env[69475]: _type = "Task" [ 1048.650671] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.661400] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508730, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.679367] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1048.710536] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1048.710795] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1048.710952] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1048.711192] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1048.711349] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1048.711497] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1048.711746] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 
tempest-ServerPasswordTestJSON-443913206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1048.711936] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1048.712128] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1048.712295] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1048.712467] env[69475]: DEBUG nova.virt.hardware [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1048.713697] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c0d0031-c8ae-4d5b-9035-5118601c4211 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.723339] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a13b8334-0174-4d63-8eec-5eb3d6bb0246 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.763680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.773589] env[69475]: DEBUG nova.compute.manager [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Received event network-changed-d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.774031] env[69475]: DEBUG nova.compute.manager [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Refreshing instance network info cache due to event network-changed-d25c0e76-62cc-44b6-936c-43b7de37c528. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1048.774308] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Acquiring lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.774418] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Acquired lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1048.774607] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Refreshing network info cache for port d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1048.797039] env[69475]: DEBUG nova.compute.manager [req-1e77b25a-2894-483b-84b9-0be93848b303 req-802145ab-e9d1-4a9d-b779-37a712366d53 service nova] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Received event network-vif-deleted-cf56fb83-6fba-4e69-9e72-3cd7f5dd266c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1048.841725] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522fba67-81ba-df04-6b62-0dcd5360b5de, 'name': SearchDatastore_Task, 'duration_secs': 0.011516} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.842668] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62a0b0c8-5909-497e-a911-0d0905e7e638 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.849166] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1048.849166] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e0fc1c-0b9f-b540-9aae-1be46eb28a0b" [ 1048.849166] env[69475]: _type = "Task" [ 1048.849166] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.858415] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e0fc1c-0b9f-b540-9aae-1be46eb28a0b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.055598] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Successfully updated port: 92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1049.088892] env[69475]: DEBUG nova.scheduler.client.report [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1049.105351] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508726, 'name': PowerOffVM_Task, 'duration_secs': 0.291804} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.105905] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.105905] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.106108] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36a73428-1c6a-4779-b3e8-93fe20efdd66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.126369] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508728, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.162105] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508730, 'name': CreateVM_Task, 'duration_secs': 0.365459} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.162331] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1049.163460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.163783] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.164313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1049.164765] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19231000-3798-4c9e-becc-d5df87925213 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.171817] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1049.171817] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b4e86e-4236-2659-cac7-13d97ce281fb" [ 1049.171817] env[69475]: _type = "Task" [ 1049.171817] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.181083] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.181309] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.181555] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleting the datastore file [datastore1] ff09407e-93ea-4919-ba5f-b7ee6dd018a4 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.185515] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-571e23f1-67cf-46ed-aeee-c5f1078defee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.192054] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b4e86e-4236-2659-cac7-13d97ce281fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.198340] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1049.198340] env[69475]: value = "task-3508732" [ 1049.198340] env[69475]: _type = "Task" [ 1049.198340] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.207391] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508732, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.361850] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e0fc1c-0b9f-b540-9aae-1be46eb28a0b, 'name': SearchDatastore_Task, 'duration_secs': 0.013831} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.362174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.362539] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] ecf115fc-4ca1-41e2-ac42-82ec8154356e/ecf115fc-4ca1-41e2-ac42-82ec8154356e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1049.362884] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6bef1b7-1583-4dd3-bb87-d638080630b7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.371137] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1049.371137] env[69475]: value = "task-3508733" [ 1049.371137] env[69475]: _type = "Task" [ 1049.371137] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.382297] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508733, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.412897] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.413262] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd526bc5-b8ac-4277-bb37-f9e4a3efe2f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.420912] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1049.420912] env[69475]: value = "task-3508734" [ 1049.420912] env[69475]: _type = "Task" [ 1049.420912] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.430585] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508734, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.503398] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Updated VIF entry in instance network info cache for port d25c0e76-62cc-44b6-936c-43b7de37c528. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1049.504040] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Updating instance_info_cache with network_info: [{"id": "d25c0e76-62cc-44b6-936c-43b7de37c528", "address": "fa:16:3e:38:25:d7", "network": {"id": "85a10cfc-543c-49cd-95a7-0778b7d703b2", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "34b98c3722d74fe5827d9c95c1df7a95", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ea45c024-d603-4bac-9c1b-f302437ea4fe", "external-id": "nsx-vlan-transportzone-946", "segmentation_id": 946, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd25c0e76-62", "ovs_interfaceid": "d25c0e76-62cc-44b6-936c-43b7de37c528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.558915] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.559242] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquired lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.559533] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1049.623889] env[69475]: DEBUG oslo_vmware.api [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508728, 'name': PowerOnVM_Task, 'duration_secs': 0.673136} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.624774] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1049.624992] env[69475]: INFO nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Took 10.29 seconds to spawn the instance on the hypervisor. [ 1049.625201] env[69475]: DEBUG nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1049.626084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7cff33f-9a6e-4265-9d51-6be8b802257a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.684368] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b4e86e-4236-2659-cac7-13d97ce281fb, 'name': SearchDatastore_Task, 'duration_secs': 0.014572} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.685655] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1049.685929] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1049.686229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1049.686385] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1049.686570] 
env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1049.687151] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e3dbd286-d6d6-43c0-9d44-15dc34962c6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.709224] env[69475]: DEBUG oslo_vmware.api [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508732, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.190188} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.709330] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1049.709475] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1049.709648] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1049.709820] env[69475]: INFO nova.compute.manager [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1049.710154] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1049.710488] env[69475]: DEBUG nova.compute.manager [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1049.710659] env[69475]: DEBUG nova.network.neutron [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1049.720073] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1049.720294] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1049.723122] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-978d485e-0d12-4913-af4c-6bea06772b73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.729287] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1049.729287] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c5bef8-ce9e-44fc-61bd-5f5f6be13973" [ 1049.729287] env[69475]: _type = "Task" [ 1049.729287] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.741794] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c5bef8-ce9e-44fc-61bd-5f5f6be13973, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.886074] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508733, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.931875] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508734, 'name': PowerOffVM_Task, 'duration_secs': 0.195817} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.932187] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.933097] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7638269-b74f-4e5c-bdaa-a827e2719ead {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.953194] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d073fa1e-9d98-4e3e-9ea4-beac3113831c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.987521] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1049.987844] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-896d6089-4f62-46cf-93a4-dd8562a52072 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.994652] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1049.994652] env[69475]: value = "task-3508735" [ 1049.994652] env[69475]: _type = "Task" [ 1049.994652] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.002833] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508735, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.006493] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Releasing lock "refresh_cache-cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.006751] env[69475]: DEBUG nova.compute.manager [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Received event network-changed-316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.006902] env[69475]: DEBUG nova.compute.manager [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Refreshing instance network info cache due to event network-changed-316b2c71-6909-4d98-a09c-c3c58878a1ed. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1050.007133] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Acquiring lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.007275] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Acquired lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.007484] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Refreshing network info cache for port 316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1050.109764] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.434s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.110013] env[69475]: DEBUG nova.compute.manager [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=69475) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1050.113595] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1050.115677] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.189s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.117168] env[69475]: INFO nova.compute.claims [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1050.148870] env[69475]: INFO nova.compute.manager [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Took 26.10 seconds to build instance. 
[ 1050.243065] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c5bef8-ce9e-44fc-61bd-5f5f6be13973, 'name': SearchDatastore_Task, 'duration_secs': 0.024991} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.243065] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-588e4d0e-f752-46b5-b38a-201aff161dec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.249057] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1050.249057] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f8bf28-b4c6-e562-acc0-489f52cf836d" [ 1050.249057] env[69475]: _type = "Task" [ 1050.249057] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.257942] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.260081] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.260081] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f8bf28-b4c6-e562-acc0-489f52cf836d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.389805] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615299} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.389805] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] ecf115fc-4ca1-41e2-ac42-82ec8154356e/ecf115fc-4ca1-41e2-ac42-82ec8154356e.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1050.389805] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1050.389805] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97311693-29b8-4d8f-be39-36d1129d6666 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.396025] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1050.396025] env[69475]: value = "task-3508736" [ 1050.396025] env[69475]: _type = "Task" [ 1050.396025] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.408461] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.507925] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1050.507925] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1050.507925] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.654646] env[69475]: DEBUG oslo_concurrency.lockutils [None req-63c7e70f-e93b-42f0-afdc-40af7df0eacc tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.614s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.682237] env[69475]: DEBUG nova.network.neutron [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Updating instance_info_cache with network_info: [{"id": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "address": "fa:16:3e:f5:d4:bb", "network": {"id": "22b11011-56f9-42ef-852d-f2cb76ffbfa9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-524720824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d19def3c6f64a21bfaa1e8451186234", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92c1a899-25", "ovs_interfaceid": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.714225] env[69475]: INFO nova.scheduler.client.report [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Deleted allocation for migration d7afea0c-7a3e-479f-89f1-6da0ed8ba26e [ 1050.766755] env[69475]: INFO nova.compute.manager [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Detaching volume d875e52a-1617-4b13-83ce-60084abbe663 [ 1050.768723] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f8bf28-b4c6-e562-acc0-489f52cf836d, 'name': SearchDatastore_Task, 'duration_secs': 0.02293} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.769200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1050.769474] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cd0e8c6a-700a-47f8-9a4c-054b84a59a7f/cd0e8c6a-700a-47f8-9a4c-054b84a59a7f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1050.769776] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.770012] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1050.770406] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e3fdf4f-8293-48bd-a4bb-34cc63889fd2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.773116] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updated VIF entry in instance network info cache for port 316b2c71-6909-4d98-a09c-c3c58878a1ed. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1050.773584] env[69475]: DEBUG nova.network.neutron [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updating instance_info_cache with network_info: [{"id": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "address": "fa:16:3e:d3:e8:59", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap316b2c71-69", "ovs_interfaceid": "316b2c71-6909-4d98-a09c-c3c58878a1ed", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.778307] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae5d62d6-e880-4d3a-8917-ff5669372906 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.788434] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1050.788434] env[69475]: value = "task-3508737" [ 1050.788434] env[69475]: _type = "Task" [ 1050.788434] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.793358] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1050.793543] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1050.795394] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3fbc80c-e6ee-4ef2-b46e-498a66be6e26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.801245] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508737, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.805488] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1050.805488] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dcc0d-4cc7-51a4-9800-27252768726a" [ 1050.805488] env[69475]: _type = "Task" [ 1050.805488] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.814331] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dcc0d-4cc7-51a4-9800-27252768726a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.822195] env[69475]: INFO nova.virt.block_device [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Attempting to driver detach volume d875e52a-1617-4b13-83ce-60084abbe663 from mountpoint /dev/sdb [ 1050.822436] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1050.822627] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701066', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'name': 'volume-d875e52a-1617-4b13-83ce-60084abbe663', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'e8c2d21e-2e42-48de-928e-c5fd944899b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'serial': 'd875e52a-1617-4b13-83ce-60084abbe663'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1050.823551] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da15aa3-6e1d-4253-b956-77cab76ec6ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.846021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab37dec-d27b-4d33-9ab3-5a22e28daa86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.853753] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e047c161-07ad-4d41-81b4-5554cef04285 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.875701] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49301eec-e952-4c9f-a18b-68465dad5797 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.896570] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] The volume has not been displaced from its original location: [datastore2] volume-d875e52a-1617-4b13-83ce-60084abbe663/volume-d875e52a-1617-4b13-83ce-60084abbe663.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1050.902316] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfiguring VM instance instance-00000041 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1050.905732] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ee817bf-6c37-45f2-9b7c-40111ec42b6a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.928823] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066826} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.930275] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1050.930647] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1050.930647] env[69475]: value = "task-3508738" [ 1050.930647] env[69475]: _type = "Task" [ 1050.930647] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.931382] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f01608-6f5b-4aa1-b467-4187b5cd2b3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.941933] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Received event network-vif-plugged-92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.942208] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Acquiring lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.942443] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.942617] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.942803] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] No waiting events found dispatching network-vif-plugged-92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1050.942974] env[69475]: WARNING nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Received unexpected event network-vif-plugged-92c1a899-25cb-4f56-9e5c-ef5387893a31 for instance with vm_state building and task_state spawning. [ 1050.943182] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Received event network-changed-92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1050.943361] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Refreshing instance network info cache due to event network-changed-92c1a899-25cb-4f56-9e5c-ef5387893a31. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1050.943564] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Acquiring lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.966973] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] ecf115fc-4ca1-41e2-ac42-82ec8154356e/ecf115fc-4ca1-41e2-ac42-82ec8154356e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1050.967351] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508738, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.967973] env[69475]: DEBUG nova.network.neutron [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.969412] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e020eb31-99b9-485b-9c73-b3f9123be06b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.989223] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1050.989223] env[69475]: value = "task-3508739" [ 1050.989223] env[69475]: _type = "Task" [ 1050.989223] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.999797] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508739, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.186128] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Releasing lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.186487] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Instance network_info: |[{"id": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "address": "fa:16:3e:f5:d4:bb", "network": {"id": "22b11011-56f9-42ef-852d-f2cb76ffbfa9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-524720824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d19def3c6f64a21bfaa1e8451186234", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92c1a899-25", "ovs_interfaceid": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1051.190152] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Acquired lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.190459] env[69475]: DEBUG nova.network.neutron [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Refreshing network info cache for port 92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.192547] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:d4:bb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '605f83bd-808c-4b54-922e-54b14690987a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92c1a899-25cb-4f56-9e5c-ef5387893a31', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.203495] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 
tempest-ServerPasswordTestJSON-443913206-project-member] Creating folder: Project (4d19def3c6f64a21bfaa1e8451186234). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1051.205131] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f18acfb-ff92-4540-9143-b7c39b9f1a17 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.220206] env[69475]: DEBUG oslo_concurrency.lockutils [None req-594a318f-05ab-444c-a4bd-76ab55074c63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 16.385s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.223854] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Created folder: Project (4d19def3c6f64a21bfaa1e8451186234) in parent group-v700823. [ 1051.224411] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Creating folder: Instances. Parent ref: group-v701100. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1051.224411] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-42c3606a-8104-4fc8-8c0b-9acc2b021994 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.238127] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Created folder: Instances in parent group-v701100. [ 1051.238545] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.238818] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1051.239099] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-472be737-2d73-4483-8e13-d70d2df6487a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.273408] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.273408] env[69475]: value = "task-3508742" [ 1051.273408] env[69475]: _type = "Task" [ 1051.273408] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.280803] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508742, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.287338] env[69475]: DEBUG oslo_concurrency.lockutils [req-ff383e1e-bcb5-469a-aa82-a7e1157b2b74 req-bb18b575-70a4-400a-b0de-337015088f52 service nova] Releasing lock "refresh_cache-20b37e69-5870-4f63-aeba-9293615da478" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.299921] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508737, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.317950] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523dcc0d-4cc7-51a4-9800-27252768726a, 'name': SearchDatastore_Task, 'duration_secs': 0.01461} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.322822] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-409b6b3a-fbaa-4ee1-ab39-5fc6bb468b8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.335644] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1051.335644] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5205c8e2-4fff-8000-320d-c5e96a94856e" [ 1051.335644] env[69475]: _type = "Task" [ 1051.335644] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.350195] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5205c8e2-4fff-8000-320d-c5e96a94856e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.448401] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508738, 'name': ReconfigVM_Task, 'duration_secs': 0.421463} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.448718] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Reconfigured VM instance instance-00000041 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1051.456427] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9cd185b7-3fbd-43e4-a788-f86ad01c2bc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.473387] env[69475]: DEBUG nova.compute.manager [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Received event network-changed-fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.474646] env[69475]: DEBUG nova.compute.manager [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Refreshing instance network info cache due to event network-changed-fbde5d12-5376-4f30-a0eb-1e63c7d36242. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1051.474646] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] Acquiring lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.474646] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] Acquired lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.474840] env[69475]: DEBUG nova.network.neutron [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Refreshing network info cache for port fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.487072] env[69475]: INFO nova.compute.manager [-] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Took 1.78 seconds to deallocate network for instance. [ 1051.487747] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1051.487747] env[69475]: value = "task-3508743" [ 1051.487747] env[69475]: _type = "Task" [ 1051.487747] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.511935] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508743, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.516812] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.568649] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52cc2b7-3453-4c30-bdcc-380d4aec60e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.577536] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bde7c03-7857-434e-802f-6c64bcc614a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.618637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e39f702-046e-4298-8372-df4ac68bab91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.627507] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63767f18-c14b-4f7f-a9b8-e4ca1ae52592 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.645732] env[69475]: DEBUG nova.compute.provider_tree [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.777138] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508742, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.799042] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508737, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.808721} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.799388] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cd0e8c6a-700a-47f8-9a4c-054b84a59a7f/cd0e8c6a-700a-47f8-9a4c-054b84a59a7f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1051.799624] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1051.799983] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-95d7c854-5ac8-48ae-b62f-4ea1b6521116 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.806703] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1051.806703] env[69475]: value = "task-3508744" [ 1051.806703] env[69475]: _type = "Task" [ 1051.806703] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.815950] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508744, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.846290] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5205c8e2-4fff-8000-320d-c5e96a94856e, 'name': SearchDatastore_Task, 'duration_secs': 0.055796} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.846573] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.846855] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
{{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1051.847177] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b8b2285-999c-47ec-8ba0-85b426afbe91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.854199] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1051.854199] env[69475]: value = "task-3508745" [ 1051.854199] env[69475]: _type = "Task" [ 1051.854199] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.862608] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508745, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.907947] env[69475]: DEBUG nova.network.neutron [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Updated VIF entry in instance network info cache for port 92c1a899-25cb-4f56-9e5c-ef5387893a31. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1051.908458] env[69475]: DEBUG nova.network.neutron [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Updating instance_info_cache with network_info: [{"id": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "address": "fa:16:3e:f5:d4:bb", "network": {"id": "22b11011-56f9-42ef-852d-f2cb76ffbfa9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-524720824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4d19def3c6f64a21bfaa1e8451186234", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "605f83bd-808c-4b54-922e-54b14690987a", "external-id": "nsx-vlan-transportzone-25", "segmentation_id": 25, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92c1a899-25", "ovs_interfaceid": "92c1a899-25cb-4f56-9e5c-ef5387893a31", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.981459] env[69475]: DEBUG nova.objects.instance [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.001131] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1052.007127] env[69475]: DEBUG oslo_vmware.api [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508743, 'name': ReconfigVM_Task, 'duration_secs': 0.184149} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.010403] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701066', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'name': 'volume-d875e52a-1617-4b13-83ce-60084abbe663', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'e8c2d21e-2e42-48de-928e-c5fd944899b6', 'attached_at': '', 'detached_at': '', 'volume_id': 'd875e52a-1617-4b13-83ce-60084abbe663', 'serial': 'd875e52a-1617-4b13-83ce-60084abbe663'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1052.012780] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508739, 'name': ReconfigVM_Task, 'duration_secs': 0.673893} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.015744] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Reconfigured VM instance instance-00000063 to attach disk [datastore2] ecf115fc-4ca1-41e2-ac42-82ec8154356e/ecf115fc-4ca1-41e2-ac42-82ec8154356e.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1052.016687] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-184560dc-bb11-4fd6-9b91-be0b9bfdb3ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.023781] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1052.023781] env[69475]: value = "task-3508746" [ 1052.023781] env[69475]: _type = "Task" [ 1052.023781] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.036123] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508746, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.152022] env[69475]: DEBUG nova.scheduler.client.report [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.243798] env[69475]: DEBUG nova.network.neutron [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updated VIF entry in instance network info cache for port fbde5d12-5376-4f30-a0eb-1e63c7d36242. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.244488] env[69475]: DEBUG nova.network.neutron [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating instance_info_cache with network_info: [{"id": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "address": "fa:16:3e:dc:db:e1", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.186", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfbde5d12-53", "ovs_interfaceid": "fbde5d12-5376-4f30-a0eb-1e63c7d36242", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.277747] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508742, 'name': CreateVM_Task, 'duration_secs': 0.966083} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.277929] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.279150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.279150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.279150] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.279441] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77b52ac7-dc14-4164-8e8c-8a54964a30af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.284891] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1052.284891] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aaef30-c524-75c2-84f7-2091aff7466d" [ 1052.284891] env[69475]: _type = "Task" [ 1052.284891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.293880] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aaef30-c524-75c2-84f7-2091aff7466d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.317050] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127797} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.317478] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1052.318473] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c9b49e-0b79-4a9d-8b2d-95ebc1837f2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.345371] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] cd0e8c6a-700a-47f8-9a4c-054b84a59a7f/cd0e8c6a-700a-47f8-9a4c-054b84a59a7f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.345733] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d432ffd-7936-4659-a4ad-d75ceddf203a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.368731] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1052.368731] env[69475]: value = "task-3508747" [ 1052.368731] env[69475]: _type = "Task" [ 1052.368731] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.371968] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508745, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.380231] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508747, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.411944] env[69475]: DEBUG oslo_concurrency.lockutils [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] Releasing lock "refresh_cache-951c225b-d930-449f-81b5-4f28f9dd27e5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.412398] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Received event network-vif-deleted-de52f276-c28b-45f5-8248-9019b9765828 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1052.412618] env[69475]: INFO nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Neutron deleted interface de52f276-c28b-45f5-8248-9019b9765828; detaching it from the instance and deleting it from the info cache [ 1052.412800] env[69475]: DEBUG nova.network.neutron [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.490072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.490376] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.490411] env[69475]: DEBUG nova.network.neutron [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1052.490693] env[69475]: DEBUG nova.objects.instance [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'info_cache' on Instance uuid 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.533541] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508746, 'name': Rename_Task, 'duration_secs': 0.24031} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.533869] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.534162] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87f31809-113c-4647-9522-d830ccc7c9f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.540598] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1052.540598] env[69475]: value = "task-3508748" [ 1052.540598] env[69475]: _type = "Task" [ 1052.540598] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.548836] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.562537] env[69475]: DEBUG nova.objects.instance [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'flavor' on Instance uuid e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.662822] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.663510] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1052.666436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.329s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.668368] env[69475]: INFO nova.compute.claims [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1052.748249] env[69475]: DEBUG oslo_concurrency.lockutils [req-4e0d54d3-7190-4ac0-8c2c-fa6526a0a6be req-b17eb0e4-2604-42b4-b59d-1af983383c9a service nova] Releasing lock "refresh_cache-4f091501-351c-45b8-9f64-4d28d4623df8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.795898] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52aaef30-c524-75c2-84f7-2091aff7466d, 'name': SearchDatastore_Task, 'duration_secs': 0.061498} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.796260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.796510] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.796833] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.797014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.797252] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.797563] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f446ce12-592b-4b8d-ae51-3c7e677ecca2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.806745] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.806930] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.807726] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16973e46-113f-4ffc-847d-34a6a26d4ae5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.812890] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1052.812890] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42e28-4f38-b525-159d-4bcd48da4561" [ 1052.812890] env[69475]: _type = "Task" [ 1052.812890] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.820724] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42e28-4f38-b525-159d-4bcd48da4561, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.869539] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508745, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.621652} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.870075] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
[ 1052.871050] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c05b494-276a-4d02-9fec-4d1bdeed7e28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.900116] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1052.903523] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67105bcc-3035-4689-9684-4db4d6b5ae9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.917131] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508747, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.917769] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69f1328b-a2cc-4d19-9a0a-306e577bad27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.928544] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66571937-7c2e-4b4c-8e6f-da5f3d0bf40a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.944501] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1052.944501] env[69475]: value = "task-3508749" [ 1052.944501] env[69475]: _type = "Task" [ 1052.944501] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.953758] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508749, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.967630] env[69475]: DEBUG nova.compute.manager [req-259915dc-88f1-4ba3-83de-a9fa55d602f6 req-2f7d2cbf-4888-4808-a201-93a24831c57b service nova] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Detach interface failed, port_id=de52f276-c28b-45f5-8248-9019b9765828, reason: Instance ff09407e-93ea-4919-ba5f-b7ee6dd018a4 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1052.994609] env[69475]: DEBUG nova.objects.base [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Object Instance<82236043-3222-4134-8717-4c239ed12aba> lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1053.051480] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508748, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.173064] env[69475]: DEBUG nova.compute.utils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1053.177283] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1053.327343] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a42e28-4f38-b525-159d-4bcd48da4561, 'name': SearchDatastore_Task, 'duration_secs': 0.014714} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.328471] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6abcddf3-ea5c-45bd-a5bc-8bc55ca1acc6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.335048] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1053.335048] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a6e9a-34b9-c4a3-07fb-9bfc61804858" [ 1053.335048] env[69475]: _type = "Task" [ 1053.335048] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.345413] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a6e9a-34b9-c4a3-07fb-9bfc61804858, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.384572] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508747, 'name': ReconfigVM_Task, 'duration_secs': 0.519092} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.384904] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Reconfigured VM instance instance-00000064 to attach disk [datastore1] cd0e8c6a-700a-47f8-9a4c-054b84a59a7f/cd0e8c6a-700a-47f8-9a4c-054b84a59a7f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.385624] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91115590-2692-43dd-ab8f-85823588e512 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.391967] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1053.391967] env[69475]: value = "task-3508750" [ 1053.391967] env[69475]: _type = "Task" [ 1053.391967] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.400654] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508750, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.453928] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508749, 'name': ReconfigVM_Task, 'duration_secs': 0.488066} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.454238] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfigured VM instance instance-0000005d to attach disk [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1053.455159] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b944db04-0aca-4cf8-8fe3-f3d3c568618e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.482443] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e7ae831-be8f-4a47-a3fb-346dc7c60c6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.499915] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1053.499915] env[69475]: value = "task-3508751" [ 1053.499915] env[69475]: _type = "Task" [ 1053.499915] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.509047] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508751, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.551862] env[69475]: DEBUG oslo_vmware.api [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508748, 'name': PowerOnVM_Task, 'duration_secs': 0.702658} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.552116] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.552381] env[69475]: INFO nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Took 9.85 seconds to spawn the instance on the hypervisor. 
[ 1053.552556] env[69475]: DEBUG nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1053.553774] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d7dbbb-94f9-474e-a303-c293b7d01319 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.572260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0b472591-92ad-45df-b262-ec70bd29fb4b tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.312s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.629074] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1053.630591] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d9a693-285b-4c47-9f7a-1e390a10b177 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.637081] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1053.637376] env[69475]: ERROR oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk due to incomplete transfer. [ 1053.637681] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5e0c593c-742a-45f6-9b85-810e56934e43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.647850] env[69475]: DEBUG oslo_vmware.rw_handles [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525d7216-dfae-94ab-bdd9-1dac724a69c2/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1053.648115] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Uploaded image a9351d13-720c-49e6-a8e9-3fac7da2b98a to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1053.650506] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1053.650808] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4150ce1b-c80a-43aa-a8bd-f1f1320c9a4a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.656713] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1053.656713] env[69475]: value = "task-3508752" [ 1053.656713] env[69475]: _type = "Task" [ 1053.656713] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.665210] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508752, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.678522] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1053.767185] env[69475]: DEBUG nova.network.neutron [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [{"id": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "address": "fa:16:3e:a7:cb:82", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91ad3911-8e", "ovs_interfaceid": "91ad3911-8ea3-4bb6-bcf5-fd800e27e57f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.850947] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527a6e9a-34b9-c4a3-07fb-9bfc61804858, 'name': SearchDatastore_Task, 'duration_secs': 0.011496} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.853505] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.853818] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 951c225b-d930-449f-81b5-4f28f9dd27e5/951c225b-d930-449f-81b5-4f28f9dd27e5.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.854107] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae0234bc-8e06-4164-a548-c0e4c39d1616 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.860719] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1053.860719] env[69475]: value = "task-3508753" [ 1053.860719] env[69475]: _type = "Task" [ 1053.860719] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.868806] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508753, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.901464] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508750, 'name': Rename_Task, 'duration_secs': 0.217137} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.904181] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1053.904621] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8162d7e1-ad4f-42c8-b05e-a59bd0fbfeb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.912714] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1053.912714] env[69475]: value = "task-3508754" [ 1053.912714] env[69475]: _type = "Task" [ 1053.912714] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.925333] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508754, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.005503] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f659be59-7e8e-4926-82e4-f1beec397e5f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.014721] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508751, 'name': ReconfigVM_Task, 'duration_secs': 0.285407} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.017491] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1054.018069] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e05de46-a0dc-47ea-81f6-23b0fd24aa4f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.021033] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d3beaf-6796-4bab-a88e-5ec564c9e423 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.072410] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37611b58-3c36-4e0e-8507-12090e8ec2b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.075273] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1054.075273] env[69475]: value = "task-3508755" [ 1054.075273] env[69475]: _type = "Task" [ 1054.075273] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.075719] env[69475]: INFO nova.compute.manager [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Took 26.20 seconds to build instance. 
[ 1054.084970] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b4e38a-2458-464b-b683-578e81cd3a0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.094851] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508755, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.106065] env[69475]: DEBUG nova.compute.provider_tree [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1054.166820] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508752, 'name': Destroy_Task, 'duration_secs': 0.316524} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.167094] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Destroyed the VM [ 1054.167335] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1054.167584] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-013a4774-a462-468f-be4f-abcb39492142 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.174747] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1054.174747] env[69475]: value = "task-3508756" [ 1054.174747] env[69475]: _type = "Task" [ 1054.174747] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.182458] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508756, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.270344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-82236043-3222-4134-8717-4c239ed12aba" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1054.374472] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508753, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.424568] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508754, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.577972] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9b7decd2-6cc7-4780-b2c5-cf10b45e63e1 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.719s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.587188] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508755, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.609379] env[69475]: DEBUG nova.scheduler.client.report [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.683642] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508756, 'name': RemoveSnapshot_Task, 'duration_secs': 0.429511} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.683942] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1054.684261] env[69475]: DEBUG nova.compute.manager [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1054.685047] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee17348b-d33d-4da1-be3b-929c2cdac710 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.693217] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1054.718789] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1054.719197] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1054.719341] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1054.719604] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1054.719841] env[69475]: DEBUG nova.virt.hardware [None 
req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1054.720080] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1054.720364] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1054.720575] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1054.720760] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1054.720946] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1054.721192] env[69475]: DEBUG nova.virt.hardware [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1054.722061] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccac177b-502a-4629-be05-3b448f0f8d1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.725144] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.725453] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.725712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.725946] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.726195] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.728959] env[69475]: INFO nova.compute.manager [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Terminating instance [ 1054.736172] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a11b50-0c1f-4b34-98fb-2b3c2fa9e5c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.750855] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1054.756670] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Creating folder: Project (7e2b98a077454641b94a86ed9c033034). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1054.757163] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37f84b41-3438-4620-95ac-aa3fd715ed74 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.767015] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Created folder: Project (7e2b98a077454641b94a86ed9c033034) in parent group-v700823. 
[ 1054.767230] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Creating folder: Instances. Parent ref: group-v701103. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1054.767533] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e9798a6d-8d5d-4f31-82d4-7641fb5fa2f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.776362] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Created folder: Instances in parent group-v701103. [ 1054.776589] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1054.776784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1054.776962] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75c9bd4e-db22-4b5e-8be9-4f59565e8872 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.793248] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1054.793248] env[69475]: value = "task-3508759" [ 1054.793248] env[69475]: _type = "Task" [ 1054.793248] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.800422] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508759, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.870858] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508753, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695793} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.871181] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 951c225b-d930-449f-81b5-4f28f9dd27e5/951c225b-d930-449f-81b5-4f28f9dd27e5.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1054.871414] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1054.871674] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9dea62c5-76bf-4dd0-83c8-57b670c770ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.877455] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1054.877455] env[69475]: value = "task-3508760" [ 1054.877455] env[69475]: _type = "Task" [ 1054.877455] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.886150] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508760, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.923617] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508754, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.090130] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508755, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.114693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.448s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.115562] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1055.118652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.331s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.118941] env[69475]: DEBUG nova.objects.instance [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lazy-loading 'resources' on Instance uuid 44bcaa36-ecd9-448b-b589-7c32066ede1d {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1055.201068] env[69475]: INFO nova.compute.manager [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Shelve offloading [ 1055.235593] env[69475]: DEBUG nova.compute.manager [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1055.235827] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1055.236745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4a14a3-663a-49e8-9cb0-59a37a7c6a99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.248120] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.248389] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2575367a-9152-4e09-bdf7-1441fddfdceb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.256508] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1055.256508] env[69475]: value = "task-3508761" [ 1055.256508] env[69475]: _type = "Task" [ 1055.256508] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.265530] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508761, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.275819] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.276156] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e7e7a325-dfa0-400f-b6cd-b80876b40728 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.286731] env[69475]: DEBUG oslo_vmware.api [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1055.286731] env[69475]: value = "task-3508762" [ 1055.286731] env[69475]: _type = "Task" [ 1055.286731] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.297228] env[69475]: DEBUG oslo_vmware.api [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508762, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.304380] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508759, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.388442] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508760, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100227} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.388969] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1055.389586] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19bf6da-3df9-47d8-b8f9-3661ef329464 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.413696] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 951c225b-d930-449f-81b5-4f28f9dd27e5/951c225b-d930-449f-81b5-4f28f9dd27e5.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1055.415226] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a482a33e-2288-49f3-915f-36542d09c8d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.444648] env[69475]: DEBUG oslo_vmware.api [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508754, 'name': PowerOnVM_Task, 'duration_secs': 1.372682} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.446989] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.447251] env[69475]: INFO nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Took 9.25 seconds to spawn the instance on the hypervisor. 
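The CreateVM_Task / ExtendVirtualDisk_Task / PowerOnVM_Task entries above show oslo.vmware's task-polling loop: the driver invokes a vSphere *_Task method, then wait_for_task() logs "progress is N%" until the task is reported "completed successfully". A minimal sketch of that call pattern against a reachable vCenter, assuming the public oslo_vmware.api.VMwareAPISession interface; the host, credentials and managed-object id below are placeholders, not values taken from this log:

    from oslo_vmware import api, vim_util

    # Placeholder connection details; the real ones are not in this log.
    session = api.VMwareAPISession('vcenter.example.org', 'user', 'secret',
                                   api_retry_count=3, task_poll_interval=0.5)

    # Build a managed object reference for a VM (the id is a placeholder).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api returns the task reference; wait_for_task polls it
    # (the "progress is N%" lines) and raises if the task fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)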
[ 1055.447466] env[69475]: DEBUG nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1055.448055] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1055.448055] env[69475]: value = "task-3508763" [ 1055.448055] env[69475]: _type = "Task" [ 1055.448055] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.448893] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad63e67-ab1c-45d0-a484-d1fe491ec645 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.464768] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508763, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.589177] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508755, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.623836] env[69475]: DEBUG nova.compute.utils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1055.628818] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1055.628818] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1055.681699] env[69475]: DEBUG nova.policy [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1055.706225] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1055.706752] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10cd6ed6-cfba-42c8-b02e-0cf487dccc71 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.714298] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1055.714298] env[69475]: value = "task-3508764" [ 1055.714298] env[69475]: _type = "Task" [ 1055.714298] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.730928] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1055.732894] env[69475]: DEBUG nova.compute.manager [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1055.734477] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb764ef7-0ad5-4683-95ac-77970017fedf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.743277] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.743455] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.743702] env[69475]: DEBUG nova.network.neutron [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.765413] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508761, 'name': PowerOffVM_Task, 'duration_secs': 0.248081} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.768091] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1055.768303] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1055.768736] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bd6ad175-d690-48a7-8123-cc0949544c1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.799838] env[69475]: DEBUG oslo_vmware.api [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508762, 'name': PowerOnVM_Task, 'duration_secs': 0.433429} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.803282] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1055.803501] env[69475]: DEBUG nova.compute.manager [None req-d06a7dd3-738c-45ba-a044-b68aef25082e tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1055.806488] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacd152d-2941-454b-8aa3-fc378e58e07a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.817248] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508759, 'name': CreateVM_Task, 'duration_secs': 0.521502} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.817377] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1055.817760] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.817916] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1055.818721] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1055.821535] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9426d33-06c0-4dda-b397-46cdc12fa283 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.826874] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1055.826874] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5287250f-ca58-2723-e96d-976734a9906c" [ 1055.826874] env[69475]: _type = "Task" [ 1055.826874] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.838555] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5287250f-ca58-2723-e96d-976734a9906c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.852192] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1055.852456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1055.852652] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore2] e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1055.853626] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d67f5a1e-1ab9-4024-95c9-7d6e09aaf6ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.860829] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1055.860829] env[69475]: value = "task-3508766" [ 1055.860829] env[69475]: _type = "Task" [ 1055.860829] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.872171] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508766, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.938358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.938912] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.939385] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.939784] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.940921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.942746] env[69475]: INFO nova.compute.manager [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Terminating instance [ 1055.950530] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb190cb-4253-4a6e-8c02-20361ac38652 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.966228] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e02838d-19d1-4419-a8e7-087097799e0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.969836] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508763, 'name': ReconfigVM_Task, 'duration_secs': 0.461655} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.973990] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 951c225b-d930-449f-81b5-4f28f9dd27e5/951c225b-d930-449f-81b5-4f28f9dd27e5.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.975113] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acd9ba46-3305-452b-a474-938429c27a92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.981902] env[69475]: INFO nova.compute.manager [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Took 20.93 seconds to build instance. [ 1056.012944] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Successfully created port: 20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1056.015680] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e75da15-870e-407c-a151-d85682e1db13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.019817] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1056.019817] env[69475]: value = "task-3508767" [ 1056.019817] env[69475]: _type = "Task" [ 1056.019817] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.027112] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b64380ab-26b9-4a7e-bb05-3c744038483b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.035469] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508767, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.045906] env[69475]: DEBUG nova.compute.provider_tree [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.088665] env[69475]: DEBUG oslo_vmware.api [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508755, 'name': PowerOnVM_Task, 'duration_secs': 1.581049} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.089281] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.092024] env[69475]: DEBUG nova.compute.manager [None req-ee2efed9-44c9-40cc-beea-cee4e6645aff tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.092889] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b89e9b9a-0809-4e49-95b9-d28d098c2385 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.129429] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1056.337831] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5287250f-ca58-2723-e96d-976734a9906c, 'name': SearchDatastore_Task, 'duration_secs': 0.012579} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.338034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.338255] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1056.338489] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.338634] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1056.338811] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1056.339080] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f8aad6dd-b934-4f37-b8aa-b99a9f79b1ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.353414] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1056.353414] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1056.353414] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f48f3f9-415b-42e5-8c60-32031b943c1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.358751] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1056.358751] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52404f18-60ab-9582-b5d1-936bce21bb28" [ 1056.358751] env[69475]: _type = "Task" [ 1056.358751] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.377270] env[69475]: DEBUG oslo_vmware.api [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508766, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186951} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.377270] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52404f18-60ab-9582-b5d1-936bce21bb28, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.377270] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1056.377460] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1056.377654] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1056.377954] env[69475]: INFO nova.compute.manager [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1056.378251] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1056.378502] env[69475]: DEBUG nova.compute.manager [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1056.378609] env[69475]: DEBUG nova.network.neutron [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1056.447779] env[69475]: DEBUG nova.compute.manager [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1056.448076] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1056.448979] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7e666c-2e98-409a-9092-82ff508c0fde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.457637] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1056.457897] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2cd12ead-5e17-4a3b-b67b-263b60a9a62b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.465380] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1056.465380] env[69475]: value = "task-3508768" [ 1056.465380] env[69475]: _type = "Task" [ 1056.465380] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.482495] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8ad79dd1-8082-4cfc-804b-e41e0fe80c9f tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.444s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.490453] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508768, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.530830] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508767, 'name': Rename_Task, 'duration_secs': 0.22407} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.533323] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1056.533323] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09cc3cb1-02e2-417d-ab65-56d11be4debf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.537996] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1056.537996] env[69475]: value = "task-3508769" [ 1056.537996] env[69475]: _type = "Task" [ 1056.537996] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.547350] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508769, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.550055] env[69475]: DEBUG nova.scheduler.client.report [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.671260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.671260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.671260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "82236043-3222-4134-8717-4c239ed12aba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.671260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1056.671260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1056.673972] env[69475]: INFO nova.compute.manager [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Terminating instance [ 1056.806267] env[69475]: DEBUG nova.network.neutron [None req-3791f0f7-7333-4fba-8d0f-6559c093acae 
tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.869914] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52404f18-60ab-9582-b5d1-936bce21bb28, 'name': SearchDatastore_Task, 'duration_secs': 0.037292} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.870685] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8856274-198a-4fa8-af30-df4fed0a4331 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.876547] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1056.876547] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f767a0-2233-3c83-e205-32797a849f64" [ 1056.876547] env[69475]: _type = "Task" [ 1056.876547] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.885945] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f767a0-2233-3c83-e205-32797a849f64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.978028] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508768, 'name': PowerOffVM_Task, 'duration_secs': 0.268139} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.978232] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1056.978396] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1056.978634] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-85689f37-8026-4f17-b546-54b5a2a5036c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.049852] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508769, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.051994] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.052269] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.052485] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore2] ecf115fc-4ca1-41e2-ac42-82ec8154356e {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.052746] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d1c91034-8f6b-41eb-b9d1-4a757198ac83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.055165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.937s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.057471] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.294s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.057697] env[69475]: DEBUG nova.objects.instance [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lazy-loading 'resources' on Instance uuid 24ef554b-30bf-4e28-856e-98eb7ec2618b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.065729] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1057.065729] env[69475]: value = "task-3508771" [ 1057.065729] env[69475]: _type = "Task" [ 1057.065729] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.083390] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508771, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.086613] env[69475]: INFO nova.scheduler.client.report [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Deleted allocations for instance 44bcaa36-ecd9-448b-b589-7c32066ede1d [ 1057.141802] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1057.172733] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.172989] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1057.173140] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1057.173323] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1057.173515] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1057.173740] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1057.173990] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1057.174177] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1057.174365] env[69475]: DEBUG nova.virt.hardware [None 
req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1057.174511] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1057.174688] env[69475]: DEBUG nova.virt.hardware [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1057.175610] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1106dcab-65b9-49ed-9c62-8eb119b3124c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.183445] env[69475]: DEBUG nova.compute.manager [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1057.183666] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.184545] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d6307f-2d59-41f1-af54-1b1349aa08a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.188776] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc88a88-dbcf-4aff-962b-57191cc801b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.198064] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1057.207801] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-724eaed2-78c6-40b1-bd18-40180110e899 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.218633] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1057.218633] env[69475]: value = "task-3508772" [ 1057.218633] env[69475]: _type = "Task" [ 1057.218633] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.230315] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508772, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.309205] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.349082] env[69475]: DEBUG nova.compute.manager [req-203d58c5-afd5-4db6-9ad4-3decec1c85ed req-edfbfe08-4bbf-4f8b-ac00-e5ba21ca3c39 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Received event network-vif-deleted-87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1057.349558] env[69475]: INFO nova.compute.manager [req-203d58c5-afd5-4db6-9ad4-3decec1c85ed req-edfbfe08-4bbf-4f8b-ac00-e5ba21ca3c39 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Neutron deleted interface 87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39; detaching it from the instance and deleting it from the info cache [ 1057.349897] env[69475]: DEBUG nova.network.neutron [req-203d58c5-afd5-4db6-9ad4-3decec1c85ed req-edfbfe08-4bbf-4f8b-ac00-e5ba21ca3c39 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.388814] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f767a0-2233-3c83-e205-32797a849f64, 'name': SearchDatastore_Task, 'duration_secs': 0.025994} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.388814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.388814] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1057.389083] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5a57a9b7-8b64-4b4b-a4da-b3bc864d3538 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.396738] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1057.396738] env[69475]: value = "task-3508773" [ 1057.396738] env[69475]: _type = "Task" [ 1057.396738] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.408469] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508773, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.551352] env[69475]: DEBUG oslo_vmware.api [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508769, 'name': PowerOnVM_Task, 'duration_secs': 0.793622} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.551469] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1057.553714] env[69475]: INFO nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Took 8.87 seconds to spawn the instance on the hypervisor. 
[ 1057.557059] env[69475]: DEBUG nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1057.557987] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94b1f75-2edc-44e7-81fe-a63a4a5db5a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.563985] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Successfully updated port: 20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1057.585524] env[69475]: DEBUG oslo_vmware.api [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508771, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.321805} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.586194] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1057.586443] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1057.586598] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1057.586769] env[69475]: INFO nova.compute.manager [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1057.587573] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1057.587943] env[69475]: DEBUG nova.compute.manager [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1057.588063] env[69475]: DEBUG nova.network.neutron [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1057.597496] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e2f23ea2-fc5c-466b-972a-a95a051170d1 tempest-ServersListShow298Test-177631875 tempest-ServersListShow298Test-177631875-project-member] Lock "44bcaa36-ecd9-448b-b589-7c32066ede1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.045s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.618259] env[69475]: DEBUG nova.compute.manager [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Received event network-vif-plugged-20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1057.618432] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] Acquiring lock "226afd68-34d8-482e-89f9-0c45a300a803-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.618674] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] Lock "226afd68-34d8-482e-89f9-0c45a300a803-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.618871] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] Lock "226afd68-34d8-482e-89f9-0c45a300a803-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.619116] env[69475]: DEBUG nova.compute.manager [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] No waiting events found dispatching network-vif-plugged-20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1057.619369] env[69475]: WARNING nova.compute.manager [req-2fc92a29-3433-4859-9c80-c8932ad5b32c req-a8f9d044-da80-48c1-b410-e171daf925b4 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Received unexpected event network-vif-plugged-20ebbb40-d77a-4c82-8038-5b810eb735db for instance with vm_state building and task_state spawning. 
[ 1057.717677] env[69475]: INFO nova.compute.manager [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Unrescuing [ 1057.718185] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.718245] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.718482] env[69475]: DEBUG nova.network.neutron [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.734632] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508772, 'name': PowerOffVM_Task, 'duration_secs': 0.207904} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.735784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1057.736049] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.736360] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3c64e39a-ea51-4c50-85d4-e71b333e6d34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.777305] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1057.779286] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-797b82a6-f273-4733-ad12-0b6605bc279e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.782625] env[69475]: DEBUG nova.network.neutron [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] 
Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.790106] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1057.795134] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-25311dfe-37d8-497d-857b-30fafc004ea5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.798360] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.798960] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.800075] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleting the datastore file [datastore1] 82236043-3222-4134-8717-4c239ed12aba {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.800940] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-718e7212-47da-425a-a0d6-f38c4e97f180 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.811067] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1057.811067] env[69475]: value = "task-3508776" [ 1057.811067] env[69475]: _type = "Task" [ 1057.811067] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.826855] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.852781] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5628a564-456b-4dc5-a705-401f363b03a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.859101] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1057.859237] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1057.859430] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleting the datastore file [datastore2] 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1057.860756] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da57c9b2-4dc9-4ad7-828d-cfa434223976 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.872718] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1a12d0a-13e5-45cb-8b3f-c01f3b49aa8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.887360] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1057.887360] env[69475]: value = "task-3508777" [ 1057.887360] env[69475]: _type = "Task" [ 1057.887360] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.908222] env[69475]: DEBUG nova.compute.manager [req-203d58c5-afd5-4db6-9ad4-3decec1c85ed req-edfbfe08-4bbf-4f8b-ac00-e5ba21ca3c39 service nova] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Detach interface failed, port_id=87df7fc0-9fd5-4c52-bc1e-c8acba0dfc39, reason: Instance e8c2d21e-2e42-48de-928e-c5fd944899b6 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1057.921675] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508777, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.929649] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508773, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.937027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.937027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.937027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.937457] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.937635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.943878] env[69475]: INFO nova.compute.manager [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Terminating instance [ 1057.988196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bc5aa4-5a5c-41a5-b9e0-762be56a9e98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.996813] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30256ffe-54a1-4e00-b75c-8475f1abb54d {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.032365] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bf8539c-f1e5-4c4d-a836-dc9830b7b420 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.040972] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a3e5f6-de78-402a-9610-9b587677bc15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.056831] env[69475]: DEBUG nova.compute.provider_tree [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.070200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.070400] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.070565] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1058.091178] env[69475]: INFO nova.compute.manager [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Took 22.12 seconds to build instance. [ 1058.286589] env[69475]: INFO nova.compute.manager [-] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Took 1.91 seconds to deallocate network for instance. [ 1058.325109] env[69475]: DEBUG oslo_vmware.api [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.279995} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.325373] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.325559] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.325733] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.325918] env[69475]: INFO nova.compute.manager [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1058.326173] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1058.326367] env[69475]: DEBUG nova.compute.manager [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1058.326460] env[69475]: DEBUG nova.network.neutron [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1058.396746] env[69475]: DEBUG nova.network.neutron [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.399025] env[69475]: DEBUG oslo_vmware.api [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508777, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.215671} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.399025] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1058.399272] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1058.399335] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1058.416846] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508773, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568332} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.417115] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1058.419199] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1058.419199] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-445a656a-bb32-4c8c-84ba-d42a6859dbb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.424513] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1058.424513] env[69475]: value = "task-3508778" [ 1058.424513] env[69475]: _type = "Task" [ 1058.424513] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.429160] env[69475]: INFO nova.scheduler.client.report [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted allocations for instance 8f18d683-7734-4798-8963-7336fe229f16 [ 1058.436991] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508778, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.450364] env[69475]: DEBUG nova.compute.manager [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1058.450602] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1058.451542] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d91ddea-dda3-4409-802d-5d4d2a5804df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.459690] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1058.459964] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42b6927c-5fc0-4c04-9c5b-325cef62e725 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.467421] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1058.467421] env[69475]: value = "task-3508779" [ 1058.467421] env[69475]: _type = "Task" [ 1058.467421] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.479863] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508779, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.483044] env[69475]: DEBUG nova.network.neutron [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updating instance_info_cache with network_info: [{"id": "72e7aa25-953c-4253-8e6e-6543fd67af89", "address": "fa:16:3e:a2:e8:60", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72e7aa25-95", "ovs_interfaceid": "72e7aa25-953c-4253-8e6e-6543fd67af89", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.562168] env[69475]: DEBUG nova.scheduler.client.report [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.593552] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2fea8267-2845-4b21-85f5-ee6cad37c887 tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.635s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.611185] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1058.794751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.797910] env[69475]: DEBUG nova.network.neutron [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Updating instance_info_cache with network_info: [{"id": "20ebbb40-d77a-4c82-8038-5b810eb735db", "address": "fa:16:3e:f5:19:2e", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ebbb40-d7", "ovs_interfaceid": "20ebbb40-d77a-4c82-8038-5b810eb735db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.899892] env[69475]: INFO nova.compute.manager [-] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Took 1.31 seconds to deallocate network for instance. [ 1058.936442] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.937312] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508778, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.18823} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.937821] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1058.939454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fd5979-1c70-483f-ba82-7a3704124078 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.962429] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1058.963100] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a1ede7b-bdb2-41e9-9840-225ee263ccab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.987426] env[69475]: DEBUG oslo_concurrency.lockutils [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-4100fb43-1dae-40b1-8caa-11dd67962274" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1058.988134] env[69475]: DEBUG nova.objects.instance [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'flavor' on Instance uuid 4100fb43-1dae-40b1-8caa-11dd67962274 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1058.989723] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508779, 'name': PowerOffVM_Task, 'duration_secs': 0.319218} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.991293] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1058.991476] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1058.991792] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1058.991792] env[69475]: value = "task-3508780" [ 1058.991792] env[69475]: _type = "Task" [ 1058.991792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.991980] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-127c241d-d3ec-4af1-b715-7bacdeab406f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.003158] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508780, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.067906] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.010s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.070578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.070s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.070849] env[69475]: DEBUG nova.objects.instance [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'resources' on Instance uuid ff09407e-93ea-4919-ba5f-b7ee6dd018a4 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.089402] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1059.089634] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1059.089817] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Deleting the datastore file [datastore1] cd0e8c6a-700a-47f8-9a4c-054b84a59a7f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1059.090108] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d9e2a23-37ff-46d7-b8a0-387eefd7dc6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.096889] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for the task: (returnval){ [ 1059.096889] env[69475]: value = "task-3508782" [ 1059.096889] env[69475]: _type = "Task" [ 1059.096889] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.097829] env[69475]: INFO nova.scheduler.client.report [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Deleted allocations for instance 24ef554b-30bf-4e28-856e-98eb7ec2618b [ 1059.109608] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.300536] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.300919] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Instance network_info: |[{"id": "20ebbb40-d77a-4c82-8038-5b810eb735db", "address": "fa:16:3e:f5:19:2e", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ebbb40-d7", "ovs_interfaceid": "20ebbb40-d77a-4c82-8038-5b810eb735db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1059.301405] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f5:19:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '20ebbb40-d77a-4c82-8038-5b810eb735db', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1059.309830] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting 
for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.311238] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1059.311474] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-988a79ef-1cf1-488e-9282-886393c606b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.331043] env[69475]: DEBUG nova.network.neutron [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.340028] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1059.340028] env[69475]: value = "task-3508783" [ 1059.340028] env[69475]: _type = "Task" [ 1059.340028] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.348152] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508783, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.402677] env[69475]: DEBUG nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-vif-unplugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.402928] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.403191] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.403400] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.403583] env[69475]: DEBUG nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] No waiting events found dispatching network-vif-unplugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1059.403820] env[69475]: WARNING nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service 
nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received unexpected event network-vif-unplugged-0241fad0-a699-4ab6-8665-37a808867cd9 for instance with vm_state shelved_offloaded and task_state None. [ 1059.404018] env[69475]: DEBUG nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.404191] env[69475]: DEBUG nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing instance network info cache due to event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1059.404395] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.404544] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.404703] env[69475]: DEBUG nova.network.neutron [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1059.406769] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.498745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33e48ee-d20a-43d5-84d2-2ecd0e3ae72f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.507256] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508780, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.526551] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1059.526955] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-989faaa4-0b98-4f6e-bed9-c0a3559780e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.533243] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1059.533243] env[69475]: value = "task-3508784" [ 1059.533243] env[69475]: _type = "Task" [ 1059.533243] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.541239] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.611633] env[69475]: DEBUG oslo_vmware.api [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Task: {'id': task-3508782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.396254} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.615277] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cffba4a3-3aab-4a63-9e5c-6844f1f66213 tempest-ServerDiskConfigTestJSON-711676554 tempest-ServerDiskConfigTestJSON-711676554-project-member] Lock "24ef554b-30bf-4e28-856e-98eb7ec2618b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.436s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.616344] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1059.617024] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1059.617024] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1059.617024] env[69475]: INFO nova.compute.manager [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1059.617250] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1059.617918] env[69475]: DEBUG nova.compute.manager [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1059.618022] env[69475]: DEBUG nova.network.neutron [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1059.653538] env[69475]: DEBUG nova.compute.manager [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Received event network-changed-20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1059.653767] env[69475]: DEBUG nova.compute.manager [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Refreshing instance network info cache due to event network-changed-20ebbb40-d77a-4c82-8038-5b810eb735db. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1059.654074] env[69475]: DEBUG oslo_concurrency.lockutils [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] Acquiring lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.654165] env[69475]: DEBUG oslo_concurrency.lockutils [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] Acquired lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.654276] env[69475]: DEBUG nova.network.neutron [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Refreshing network info cache for port 20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1059.836307] env[69475]: INFO nova.compute.manager [-] [instance: 82236043-3222-4134-8717-4c239ed12aba] Took 1.51 seconds to deallocate network for instance. [ 1059.838313] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "951c225b-d930-449f-81b5-4f28f9dd27e5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.838626] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.838821] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.838987] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.839195] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.844494] env[69475]: INFO nova.compute.manager [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Terminating instance [ 1059.860465] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508783, 'name': CreateVM_Task, 'duration_secs': 0.3867} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.860646] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1059.867022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.867022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.867022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1059.867022] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c6a786a-a9a4-4aac-a79c-6d5c094a0ade {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.869535] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1059.869535] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521197d3-fc9c-f548-6102-225e02f1cddf" [ 1059.869535] env[69475]: _type = "Task" [ 1059.869535] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.877985] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fe249b-0bfa-4135-bd55-6d1632516b3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.884913] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521197d3-fc9c-f548-6102-225e02f1cddf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.891980] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75c79534-a682-48e3-b4e1-043759856260 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.928240] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7975cd-f6d7-4cf6-af45-79a5417cfd1a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.939161] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9987d4b1-95bf-4128-8c05-5016e0994b35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.957033] env[69475]: DEBUG nova.compute.provider_tree [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.004041] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508780, 'name': ReconfigVM_Task, 'duration_secs': 0.607679} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.005590] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.006398] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-83e52650-5e0a-4672-8067-592e8cfc198b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.012984] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1060.012984] env[69475]: value = "task-3508785" [ 1060.012984] env[69475]: _type = "Task" [ 1060.012984] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.021543] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508785, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.042317] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508784, 'name': PowerOffVM_Task, 'duration_secs': 0.210287} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.045236] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.050162] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfiguring VM instance instance-0000005d to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1060.050462] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2afac93-eac6-4dd1-a100-5158ec3a7c88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.070045] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1060.070045] env[69475]: value = "task-3508786" [ 1060.070045] env[69475]: _type = "Task" [ 1060.070045] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.078831] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508786, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.219656] env[69475]: DEBUG nova.network.neutron [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updated VIF entry in instance network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.220422] env[69475]: DEBUG nova.network.neutron [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": null, "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap0241fad0-a6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.355431] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.358018] env[69475]: DEBUG nova.compute.manager [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1060.358018] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1060.358018] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bfee6b-92bc-4c98-90a5-fc1a4b0b206b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.369150] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1060.369150] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76d7435b-c56f-4b82-96db-fb84d3eb94ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.379982] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521197d3-fc9c-f548-6102-225e02f1cddf, 'name': SearchDatastore_Task, 'duration_secs': 0.011615} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.381520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.384093] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1060.384093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1060.384093] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1060.384093] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1060.384093] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1060.384093] env[69475]: value = "task-3508787" [ 1060.384093] env[69475]: _type = "Task" [ 1060.384093] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.384093] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ba4c43f0-a755-45f8-b796-9bf2ea28f59d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.396066] env[69475]: DEBUG nova.network.neutron [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Updated VIF entry in instance network info cache for port 20ebbb40-d77a-4c82-8038-5b810eb735db. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1060.396498] env[69475]: DEBUG nova.network.neutron [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Updating instance_info_cache with network_info: [{"id": "20ebbb40-d77a-4c82-8038-5b810eb735db", "address": "fa:16:3e:f5:19:2e", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap20ebbb40-d7", "ovs_interfaceid": "20ebbb40-d77a-4c82-8038-5b810eb735db", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.401406] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508787, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.403164] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1060.403282] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1060.405170] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e5711d7-bf08-418a-8def-86d19a51086a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.414527] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1060.414527] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dbbee3-a750-014a-20e1-38f2745dcec9" [ 1060.414527] env[69475]: _type = "Task" [ 1060.414527] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.425434] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dbbee3-a750-014a-20e1-38f2745dcec9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.459938] env[69475]: DEBUG nova.scheduler.client.report [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.507050] env[69475]: DEBUG nova.network.neutron [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1060.524410] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508785, 'name': Rename_Task, 'duration_secs': 0.144437} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.524738] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.525429] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-129eb07f-cb67-4610-ad38-3ed5da34235f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.531673] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1060.531673] env[69475]: value = "task-3508788" [ 1060.531673] env[69475]: _type = "Task" [ 1060.531673] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.540480] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.579421] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508786, 'name': ReconfigVM_Task, 'duration_secs': 0.393659} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.579803] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Reconfigured VM instance instance-0000005d to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1060.580008] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1060.580268] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b482ec79-7544-41f1-a565-097971d7228d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.586891] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1060.586891] env[69475]: value = "task-3508789" [ 1060.586891] env[69475]: _type = "Task" [ 1060.586891] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.594973] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508789, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.723326] env[69475]: DEBUG oslo_concurrency.lockutils [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.723326] env[69475]: DEBUG nova.compute.manager [req-53c1169e-d8d3-421b-90ae-491569bbf44f req-f84b0148-ee49-4460-9a22-1e4a6708255f service nova] [instance: 82236043-3222-4134-8717-4c239ed12aba] Received event network-vif-deleted-91ad3911-8ea3-4bb6-bcf5-fd800e27e57f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.895629] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508787, 'name': PowerOffVM_Task, 'duration_secs': 0.162293} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.895915] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.896071] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.896330] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c99ab4bd-470c-4dab-a73e-6696c36a1dba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.902647] env[69475]: DEBUG oslo_concurrency.lockutils [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] Releasing lock "refresh_cache-226afd68-34d8-482e-89f9-0c45a300a803" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.902885] env[69475]: DEBUG nova.compute.manager [req-1e3c6a06-30cf-4200-9e84-8384596e9a88 req-095203f8-fe17-457d-8e4c-dac8661372c0 service nova] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Received event network-vif-deleted-f26ed400-5630-4899-b5dd-a9af4540d3d7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1060.911638] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16" by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1060.926223] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52dbbee3-a750-014a-20e1-38f2745dcec9, 'name': SearchDatastore_Task, 'duration_secs': 0.011086} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.927159] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-357f4636-4fe9-467b-b375-12eabaf87c67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.933054] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1060.933054] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52408c5d-7cb6-e3a4-0eb6-55e97f7e8f94" [ 1060.933054] env[69475]: _type = "Task" [ 1060.933054] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.941395] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52408c5d-7cb6-e3a4-0eb6-55e97f7e8f94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.965811] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.895s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1060.967641] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.173s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1060.967883] env[69475]: DEBUG nova.objects.instance [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'resources' on Instance uuid e8c2d21e-2e42-48de-928e-c5fd944899b6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1060.977423] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.978896] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.978896] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Deleting the datastore file [datastore1] 951c225b-d930-449f-81b5-4f28f9dd27e5 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.978896] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1bb0b0e8-7144-4369-b4a0-2c5585b33ac5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.986122] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for the task: (returnval){ [ 1060.986122] env[69475]: value = "task-3508791" [ 1060.986122] env[69475]: _type = "Task" [ 1060.986122] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.998092] env[69475]: INFO nova.scheduler.client.report [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted allocations for instance ff09407e-93ea-4919-ba5f-b7ee6dd018a4 [ 1061.004907] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.009865] env[69475]: INFO nova.compute.manager [-] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Took 1.39 seconds to deallocate network for instance. [ 1061.043080] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508788, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.098919] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508789, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.432111] env[69475]: DEBUG nova.compute.manager [req-7915ae5a-dcef-4bec-9c4f-8252427fb03a req-3f6debe4-e127-459f-9e96-d42f3aeedc64 service nova] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Received event network-vif-deleted-d25c0e76-62cc-44b6-936c-43b7de37c528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.443910] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52408c5d-7cb6-e3a4-0eb6-55e97f7e8f94, 'name': SearchDatastore_Task, 'duration_secs': 0.01124} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.444174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.444421] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 226afd68-34d8-482e-89f9-0c45a300a803/226afd68-34d8-482e-89f9-0c45a300a803.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1061.444696] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9190857-68cf-4388-88ff-a2ffbe304f9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.451233] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1061.451233] env[69475]: value = "task-3508792" [ 1061.451233] env[69475]: _type = "Task" [ 1061.451233] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.459399] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508792, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.497142] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508791, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.508169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bb9a628b-deae-4301-af6b-ef2bc920c78e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "ff09407e-93ea-4919-ba5f-b7ee6dd018a4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.489s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.521683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1061.544441] env[69475]: DEBUG oslo_vmware.api [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508788, 'name': PowerOnVM_Task, 'duration_secs': 0.969062} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.546042] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.546042] env[69475]: INFO nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Took 6.85 seconds to spawn the instance on the hypervisor. [ 1061.546042] env[69475]: DEBUG nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.546042] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c2be9d-3f28-4a7d-b61e-6eabeb2547ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.604199] env[69475]: DEBUG oslo_vmware.api [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508789, 'name': PowerOnVM_Task, 'duration_secs': 0.865348} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.607242] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1061.607242] env[69475]: DEBUG nova.compute.manager [None req-71bda114-b5e2-48db-96c9-f69f418e58ec tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1061.607242] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21612c89-bcbe-45be-9f10-f5b74023f78b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.789933] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55f74bf-e466-434f-b658-c44b73712305 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.802291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781448c3-3eac-45c1-84c2-c0fce5da9aa0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.837826] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb461009-f63e-4e39-8510-fa15df8a6459 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.851830] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9c0587c-b212-412d-a565-31134910e500 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.870654] env[69475]: DEBUG nova.compute.provider_tree [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1061.961753] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508792, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.003662] env[69475]: DEBUG oslo_vmware.api [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Task: {'id': task-3508791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.515987} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.004414] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1062.004901] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1062.005453] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1062.005899] env[69475]: INFO nova.compute.manager [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Took 1.65 seconds to destroy the instance on the hypervisor. [ 1062.006394] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1062.006795] env[69475]: DEBUG nova.compute.manager [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1062.007033] env[69475]: DEBUG nova.network.neutron [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1062.069457] env[69475]: INFO nova.compute.manager [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Took 21.16 seconds to build instance. 
[ 1062.374583] env[69475]: DEBUG nova.scheduler.client.report [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1062.461730] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508792, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.600725} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.462016] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 226afd68-34d8-482e-89f9-0c45a300a803/226afd68-34d8-482e-89f9-0c45a300a803.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1062.462217] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1062.462505] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e604fe2-f1ad-49f8-a85b-942af0233971 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.469025] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1062.469025] env[69475]: value = "task-3508793" [ 1062.469025] env[69475]: _type = "Task" [ 1062.469025] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.477104] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508793, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.571811] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0bef7e75-3a11-43fc-bf9b-54d16e613952 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.669s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.881246] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.913s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1062.883470] env[69475]: DEBUG nova.network.neutron [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.885433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.949s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.885663] env[69475]: DEBUG nova.objects.instance [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'resources' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.908870] env[69475]: INFO nova.scheduler.client.report [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted allocations for instance e8c2d21e-2e42-48de-928e-c5fd944899b6 [ 1062.979179] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508793, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064314} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.979463] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1062.980235] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc19144-fdd1-4d38-809e-20b88bec34d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.003357] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 226afd68-34d8-482e-89f9-0c45a300a803/226afd68-34d8-482e-89f9-0c45a300a803.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1063.003674] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-000095c6-42fd-4837-8c16-25755d0dd46a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.026020] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1063.026020] env[69475]: value = "task-3508794" [ 1063.026020] env[69475]: _type = "Task" [ 1063.026020] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1063.039640] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508794, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.388581] env[69475]: INFO nova.compute.manager [-] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Took 1.38 seconds to deallocate network for instance. 
[ 1063.388874] env[69475]: DEBUG nova.objects.instance [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'numa_topology' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1063.417028] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1dafb2da-f39a-403e-83db-ac5d54a0d242 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e8c2d21e-2e42-48de-928e-c5fd944899b6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.691s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.460903] env[69475]: DEBUG nova.compute.manager [req-cbfda6df-ea35-4a9b-8463-1f22e2450862 req-6e849fae-9587-40b3-876f-5d95275796ee service nova] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Received event network-vif-deleted-92c1a899-25cb-4f56-9e5c-ef5387893a31 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.538656] env[69475]: INFO nova.compute.manager [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Rebuilding instance [ 1063.540724] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508794, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.595763] env[69475]: DEBUG nova.compute.manager [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1063.597119] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b50252b-e335-49bd-8c4c-9dd1123dc2e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.868451] env[69475]: DEBUG nova.compute.manager [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1063.894032] env[69475]: DEBUG nova.objects.base [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Object Instance<8f18d683-7734-4798-8963-7336fe229f16> lazy-loaded attributes: resources,numa_topology {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1063.898194] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.035814] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508794, 'name': ReconfigVM_Task, 'duration_secs': 0.695492} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.040460] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 226afd68-34d8-482e-89f9-0c45a300a803/226afd68-34d8-482e-89f9-0c45a300a803.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1064.041818] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb0ff678-c357-400e-b43b-20c03cb67e1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.048168] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1064.048168] env[69475]: value = "task-3508795" [ 1064.048168] env[69475]: _type = "Task" [ 1064.048168] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.061401] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508795, 'name': Rename_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.184288] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101dab36-89c6-4e04-8acd-bcc6f667b320 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.193393] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e33f7608-44ac-4e98-8384-0fdecdd1bdc3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.226648] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfebccdc-e189-4e59-b73c-434e0278c086 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.235789] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-870e5b36-bd08-4dc1-a9e3-f86fb8ea065e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.254187] env[69475]: DEBUG nova.compute.provider_tree [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.387911] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.561118] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508795, 'name': Rename_Task, 'duration_secs': 0.368585} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.561118] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1064.561118] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e813a369-0ef3-4412-b91b-16ed033354be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.566357] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1064.566357] env[69475]: value = "task-3508796" [ 1064.566357] env[69475]: _type = "Task" [ 1064.566357] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.575829] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.617107] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.617107] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-998dd8b3-2135-41db-96b9-95ded34a3521 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.622424] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1064.622424] env[69475]: value = "task-3508797" [ 1064.622424] env[69475]: _type = "Task" [ 1064.622424] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.638750] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508797, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.758457] env[69475]: DEBUG nova.scheduler.client.report [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1065.079968] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508796, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.135054] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508797, 'name': PowerOffVM_Task, 'duration_secs': 0.261482} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.135342] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.135980] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1065.136765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0fb2ce7-4caf-48f9-99a6-9882cddd2e11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.147285] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.147536] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fda32c5f-11d0-4805-b35c-28841db1b83d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.174769] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.175705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.175965] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Deleting the datastore file [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.176323] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dab2ad74-a974-478f-bb0d-22d727a7a99d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.182706] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1065.182706] env[69475]: value = "task-3508799" [ 1065.182706] env[69475]: _type = "Task" [ 1065.182706] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.192301] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508799, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.273067] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.387s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.275863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.869s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.276346] env[69475]: DEBUG nova.objects.instance [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid ecf115fc-4ca1-41e2-ac42-82ec8154356e {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1066.084814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.084814] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.085189] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3791f0f7-7333-4fba-8d0f-6559c093acae tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 28.216s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.088944] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 5.175s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1066.088944] env[69475]: INFO nova.compute.manager [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Unshelving [ 1066.093753] env[69475]: DEBUG oslo_vmware.api [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508796, 'name': PowerOnVM_Task, 'duration_secs': 0.850053} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.093916] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221676} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.094532] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1066.094656] env[69475]: INFO nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Took 8.95 seconds to spawn the instance on the hypervisor. [ 1066.094827] env[69475]: DEBUG nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1066.095146] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.095298] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.095456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.098730] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a4a371e-eade-45cc-8632-ff9a77ebdea6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.375697] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-20a5ec48-3f2c-44fa-a7df-93ddfefed13e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.382122] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76adbc4-49f2-4c2b-a5e6-6e91fa0c6302 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.412865] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbeef061-0950-4a7c-9954-61f34afe6ad2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.420114] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc812f8-49f2-4fa6-bc58-c3499a0f252e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.433277] env[69475]: DEBUG nova.compute.provider_tree [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1066.588584] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1066.620067] env[69475]: INFO nova.compute.manager [None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Took 24.30 seconds to build instance. 
[ 1066.621160] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1066.621581] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1066.937431] env[69475]: DEBUG nova.scheduler.client.report [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.110550] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.111010] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.116165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.117513] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.125535] env[69475]: DEBUG oslo_concurrency.lockutils 
[None req-e9a8a268-ea51-4c0a-bd0e-0450cf8bf522 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.831s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.126011] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1067.140518] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1067.140659] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1067.140739] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1067.140870] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1067.141019] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1067.141202] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1067.141359] env[69475]: DEBUG nova.virt.hardware [None 
req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1067.141525] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1067.141677] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1067.141976] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1067.142057] env[69475]: DEBUG nova.virt.hardware [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1067.143156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6875c9-0b27-4671-8202-eb3dc4369447 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.152025] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53b51fe4-4d1b-44cb-ad4a-74a79f51ce11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.164834] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.170381] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.170840] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.171055] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa1e8019-cdf3-4653-8b9c-e036e2e2073b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.188446] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.188446] env[69475]: value = "task-3508800" [ 1067.188446] env[69475]: _type = "Task" [ 1067.188446] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.199143] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508800, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.443527] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.168s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.445882] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.091s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.447319] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.449169] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.928s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.449774] env[69475]: DEBUG nova.objects.instance [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lazy-loading 'resources' on Instance uuid cd0e8c6a-700a-47f8-9a4c-054b84a59a7f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1067.467092] env[69475]: INFO nova.scheduler.client.report [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance ecf115fc-4ca1-41e2-ac42-82ec8154356e [ 1067.475218] env[69475]: INFO nova.scheduler.client.report [None req-686a3d2f-0805-494f-a4cf-468f524c8306 
tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted allocations for instance 82236043-3222-4134-8717-4c239ed12aba [ 1067.617946] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1067.652122] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.698692] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508800, 'name': CreateVM_Task, 'duration_secs': 0.305227} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.699236] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1067.699713] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.699921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.700363] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1067.700803] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b74ba00-e620-4807-aa79-d2f2fdb4c7ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.706419] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1067.706419] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520e275d-759f-dc72-0068-1615fc2603fe" [ 1067.706419] env[69475]: _type = "Task" [ 1067.706419] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.707511] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70498945-9ed2-47be-adbf-a82a57f0e3af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.716065] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Suspending the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1067.718865] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-154480a5-e217-4de9-8b20-18f6f50934ba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.720452] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520e275d-759f-dc72-0068-1615fc2603fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.725380] env[69475]: DEBUG oslo_vmware.api [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1067.725380] env[69475]: value = "task-3508801" [ 1067.725380] env[69475]: _type = "Task" [ 1067.725380] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.733155] env[69475]: DEBUG oslo_vmware.api [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508801, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.981224] env[69475]: DEBUG oslo_concurrency.lockutils [None req-7d13f60d-8519-40a7-8409-2e9d95c7782d tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "ecf115fc-4ca1-41e2-ac42-82ec8154356e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.042s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.990923] env[69475]: DEBUG oslo_concurrency.lockutils [None req-686a3d2f-0805-494f-a4cf-468f524c8306 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "82236043-3222-4134-8717-4c239ed12aba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.323s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.156921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.218878] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520e275d-759f-dc72-0068-1615fc2603fe, 'name': SearchDatastore_Task, 'duration_secs': 0.01205} completed successfully. 
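The CreateVM_Task and SearchDatastore_Task entries above all follow the same oslo.vmware cycle: invoke the API, get back a task handle, then poll it ("Waiting for the task", "progress is N%", "completed successfully") through the wait_for_task/_poll_task helpers named in the trace paths. The minimal Python sketch below mirrors that loop; FakeTask and poll_task are made-up stand-ins, not the real oslo.vmware classes.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle such as 'task-3508800'."""
        def __init__(self, steps):
            self._steps = list(steps)  # sequence of (state, progress) pairs
            self._i = 0

        def info(self):
            state, progress = self._steps[min(self._i, len(self._steps) - 1)]
            self._i += 1
            return {"state": state, "progress": progress}

    def poll_task(task, interval=0.5):
        """Poll until the task reaches a terminal state, mirroring the
        'Waiting for the task' / 'progress is N%' / 'completed successfully'
        sequence in the log."""
        while True:
            info = task.info()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed")
            print(f"progress is {info['progress']}%")
            time.sleep(interval)

    poll_task(FakeTask([("running", 0), ("running", 66), ("success", 100)]), interval=0)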
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.219198] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1068.219441] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1068.219681] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.219857] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.220047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1068.220347] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a07b017a-2820-4184-a894-b98b141394de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.231104] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1068.231287] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Folder [datastore1] devstack-image-cache_base created. 
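The "Acquiring/Acquired/Releasing lock \"[datastore1] devstack-image-cache_base/…\"" entries show vmops serializing work on a per-image cache directory, so only one request at a time checks for and populates a given cached VMDK. A rough sketch of that pattern, assuming oslo.concurrency is available; populate_image_cache is an illustrative name, not Nova's helper, and the real code also takes the external semaphore seen above.

    from oslo_concurrency import lockutils

    def populate_image_cache(image_id,
                             cache_root="[datastore1] devstack-image-cache_base"):
        lock_name = f"{cache_root}/{image_id}"
        # One request at a time works on this image's cache directory; inside
        # the lock Nova checks (SearchDatastore_Task) whether the VMDK already
        # exists and creates the directory/downloads the image if it does not.
        with lockutils.lock(lock_name):
            print(f"holding lock {lock_name!r}; safe to populate the cache dir")

    populate_image_cache("afa9d32c-9f39-44fb-bf3b-50d35842a59f")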
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1068.237183] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19c8372e-d712-4217-8c9d-a739831b734c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.240043] env[69475]: DEBUG oslo_vmware.api [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508801, 'name': SuspendVM_Task} progress is 58%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.243368] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1068.243368] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a2b0d-f398-abb2-0ec6-d0a8665dcb1c" [ 1068.243368] env[69475]: _type = "Task" [ 1068.243368] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.253317] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a2b0d-f398-abb2-0ec6-d0a8665dcb1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.254960] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91fc586-3f2a-4749-8030-2edee3654227 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.261573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1895be25-58ec-47ca-b1db-3d80c179306a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.292847] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a7f5686-7996-4103-b549-3b91c3d75714 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.300648] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924e0be9-c202-4550-848c-44bbbdc158ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.313552] env[69475]: DEBUG nova.compute.provider_tree [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1068.737196] env[69475]: DEBUG oslo_vmware.api [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508801, 'name': SuspendVM_Task, 'duration_secs': 0.751982} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.737196] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Suspended the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1068.737196] env[69475]: DEBUG nova.compute.manager [None req-1dc4f7c9-ee80-4b31-81da-2ee992f4c0cc tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1068.737656] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a86d353-c47c-4fb6-bd5a-75cb7db1c7f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.757758] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a2b0d-f398-abb2-0ec6-d0a8665dcb1c, 'name': SearchDatastore_Task, 'duration_secs': 0.008933} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.758704] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f3c128f-cd01-4beb-b872-8e858e2a8aa4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.765184] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1068.765184] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52350333-1301-dfbc-6c4f-01068f9a3a7e" [ 1068.765184] env[69475]: _type = "Task" [ 1068.765184] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.774897] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52350333-1301-dfbc-6c4f-01068f9a3a7e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.819656] env[69475]: DEBUG nova.scheduler.client.report [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1068.907519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.907756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.239985] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "96533442-eb53-4bc2-bda3-71efc973d403" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1069.240271] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.276197] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52350333-1301-dfbc-6c4f-01068f9a3a7e, 'name': SearchDatastore_Task, 'duration_secs': 0.044707} completed successfully. 
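The inventory dict reported above is what Placement uses to size provider dd221100-68c1-4a75-92b5-b24d81fee5da; schedulable capacity per resource class is roughly (total - reserved) * allocation_ratio. A small helper (mine, not Nova's) applied to the exact numbers in the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def usable_capacity(inv):
        # (total - reserved) * allocation_ratio per resource class
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(usable_capacity(inventory))
    # -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}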
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.276367] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.276623] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1069.276875] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-502bd293-31b0-4d8a-ae76-187653fda760 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.283547] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1069.283547] env[69475]: value = "task-3508802" [ 1069.283547] env[69475]: _type = "Task" [ 1069.283547] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.290620] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508802, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.324279] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.875s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.326458] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.428s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1069.326798] env[69475]: DEBUG nova.objects.instance [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lazy-loading 'resources' on Instance uuid 951c225b-d930-449f-81b5-4f28f9dd27e5 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1069.342806] env[69475]: INFO nova.scheduler.client.report [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Deleted allocations for instance cd0e8c6a-700a-47f8-9a4c-054b84a59a7f [ 1069.410284] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1069.744343] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1069.794191] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508802, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506471} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.794554] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1069.794815] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1069.795119] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6a3fbf55-248a-4796-a42a-602c34bffbb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.801629] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1069.801629] env[69475]: value = "task-3508803" [ 1069.801629] env[69475]: _type = "Task" [ 1069.801629] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.809309] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508803, 'name': ExtendVirtualDisk_Task} progress is 0%. 
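Taken together, the CopyVirtualDisk_Task and ExtendVirtualDisk_Task entries are the root-disk build for instance 97013703-3506-4441-b80c-cbb5c7e29bdf: copy the cached image VMDK into the instance directory, then grow it to the flavor's root size (the "1048576" above is kilobytes, i.e. 1 GiB). A hypothetical sketch of that sequence; build_root_disk and the injected callables are placeholders standing in for the copy_virtual_disk and _extend_virtual_disk helpers named in the trace paths.

    def build_root_disk(copy_disk, extend_disk, image_id, instance_uuid, root_gb):
        """Copy the cached image VMDK into the instance directory, then grow it
        to the flavor's root size; sizes are expressed in kilobytes."""
        src = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
        dst = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"
        copy_disk(src, dst)                      # CopyVirtualDisk_Task (task-3508802)
        extend_disk(dst, root_gb * 1024 * 1024)  # ExtendVirtualDisk_Task (task-3508803)
        return dst

    build_root_disk(lambda s, d: print("copy", s, "->", d),
                    lambda p, kb: print("extend", p, "to", kb, "KB"),
                    "afa9d32c-9f39-44fb-bf3b-50d35842a59f",
                    "97013703-3506-4441-b80c-cbb5c7e29bdf",
                    root_gb=1)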
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.852941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d321e7ba-6a62-4219-8a46-5c38328965e1 tempest-TenantUsagesTestJSON-698857048 tempest-TenantUsagesTestJSON-698857048-project-member] Lock "cd0e8c6a-700a-47f8-9a4c-054b84a59a7f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.916s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1069.930123] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.056191] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b30ad2a-bdae-4ddd-a38d-c5c88724f2d3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.065214] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ea52d4-91fb-43ed-a176-234799fd28ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.094342] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaebca69-fe8d-439a-9815-0fbe0bdc792a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.100900] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e180434-f311-4aaf-8c9b-a37ebbf0f65c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.113565] env[69475]: DEBUG nova.compute.provider_tree [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1070.192187] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "226afd68-34d8-482e-89f9-0c45a300a803" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.192363] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.192571] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 
tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "226afd68-34d8-482e-89f9-0c45a300a803-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.192742] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.192907] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.195191] env[69475]: INFO nova.compute.manager [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Terminating instance [ 1070.261578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.311091] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508803, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066185} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.311367] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1070.312309] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbfcc0bd-c84e-4d18-bf3f-b6b5427af238 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.331808] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1070.332131] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eecdc464-26ea-4c42-a807-e05ddaaba236 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.352174] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1070.352174] env[69475]: value = "task-3508804" [ 1070.352174] env[69475]: _type = "Task" [ 1070.352174] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.359323] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508804, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.618090] env[69475]: DEBUG nova.scheduler.client.report [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1070.698575] env[69475]: DEBUG nova.compute.manager [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1070.698817] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1070.700029] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff959aba-3993-4e2f-946d-94b7a537ca9c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.709117] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1070.709376] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0338b6ae-1828-45b0-83d6-3aa0eb24fd02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.769818] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1070.770062] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1070.770333] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore1] 226afd68-34d8-482e-89f9-0c45a300a803 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1070.770898] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-03dc0f7c-4d8e-4078-901f-c1c89c19f414 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.777142] env[69475]: DEBUG oslo_vmware.api [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1070.777142] env[69475]: value = "task-3508806" [ 1070.777142] env[69475]: _type = "Task" [ 1070.777142] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.784691] env[69475]: DEBUG oslo_vmware.api [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508806, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.862459] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508804, 'name': ReconfigVM_Task, 'duration_secs': 0.307677} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.862973] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Reconfigured VM instance instance-00000066 to attach disk [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1070.863845] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cc80509e-71ef-4b26-909d-cadf0f7e84d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.870451] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1070.870451] env[69475]: value = "task-3508807" [ 1070.870451] env[69475]: _type = "Task" [ 1070.870451] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.879262] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508807, 'name': Rename_Task} progress is 5%. 
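task-3508804 ("Reconfiguring VM instance instance-00000066 to attach disk … with type sparse") adds the freshly copied VMDK to the VM via ReconfigVM_Task, after which the VM is renamed and powered on. The plain-data sketch below only illustrates the shape of such an attach-disk change; it is not the real vim25/SOAP spec, and the controller key and unit number are invented placeholders, not values taken from the log.

    reconfig_spec = {
        "deviceChange": [{
            "operation": "add",
            "device": {
                "kind": "VirtualDisk",
                "backing": {
                    "fileName": "[datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf/"
                                "97013703-3506-4441-b80c-cbb5c7e29bdf.vmdk",
                    "diskType": "sparse",
                },
                "controllerKey": 1000,  # placeholder, not taken from the log
                "unitNumber": 0,        # placeholder, not taken from the log
            },
        }],
    }
    print(len(reconfig_spec["deviceChange"]), "device change(s) for ReconfigVM_Task")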
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.123256] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.797s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.126065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 6.738s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1071.150343] env[69475]: INFO nova.scheduler.client.report [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Deleted allocations for instance 951c225b-d930-449f-81b5-4f28f9dd27e5 [ 1071.286280] env[69475]: DEBUG oslo_vmware.api [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508806, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201776} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.286577] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1071.286695] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1071.286860] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1071.287037] env[69475]: INFO nova.compute.manager [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1071.287314] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
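The terminate path for instance 226afd68-34d8-482e-89f9-0c45a300a803 runs in a fixed order: unregister the VM from vCenter, delete its datastore directory, then deallocate the Neutron ports (retried through a looping call). A compact sketch of that order, with placeholder callables standing in for the real vmops/ds_util/network API calls:

    def destroy_on_hypervisor(unregister_vm, delete_datastore_dir,
                              deallocate_network, instance_uuid,
                              datastore="datastore1"):
        """Teardown order seen in the log: unregister, delete files, release ports."""
        unregister_vm(instance_uuid)                            # VirtualMachine.UnregisterVM
        delete_datastore_dir(f"[{datastore}] {instance_uuid}")  # DeleteDatastoreFile_Task
        deallocate_network(instance_uuid)                       # deallocate_for_instance()

    destroy_on_hypervisor(lambda u: print("unregister", u),
                          lambda p: print("delete", p),
                          lambda u: print("deallocate ports for", u),
                          "226afd68-34d8-482e-89f9-0c45a300a803")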
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1071.287458] env[69475]: DEBUG nova.compute.manager [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1071.287550] env[69475]: DEBUG nova.network.neutron [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1071.379803] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508807, 'name': Rename_Task, 'duration_secs': 0.134265} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.380174] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.380320] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-34e7cea5-0e2d-40ed-87ee-f5e0ed2f3907 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.386875] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1071.386875] env[69475]: value = "task-3508808" [ 1071.386875] env[69475]: _type = "Task" [ 1071.386875] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.394271] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508808, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.579708] env[69475]: DEBUG nova.compute.manager [req-59a624a5-a0d1-497e-af97-f01d4b1232ad req-21ce851e-a0a5-482c-a7ae-c6a9a8ae59dc service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Received event network-vif-deleted-20ebbb40-d77a-4c82-8038-5b810eb735db {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1071.579708] env[69475]: INFO nova.compute.manager [req-59a624a5-a0d1-497e-af97-f01d4b1232ad req-21ce851e-a0a5-482c-a7ae-c6a9a8ae59dc service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Neutron deleted interface 20ebbb40-d77a-4c82-8038-5b810eb735db; detaching it from the instance and deleting it from the info cache [ 1071.579708] env[69475]: DEBUG nova.network.neutron [req-59a624a5-a0d1-497e-af97-f01d4b1232ad req-21ce851e-a0a5-482c-a7ae-c6a9a8ae59dc service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1071.631490] env[69475]: INFO nova.compute.claims [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1071.658464] env[69475]: DEBUG oslo_concurrency.lockutils [None req-58307446-802b-4332-88ac-0e5157d8ec0e tempest-ServerPasswordTestJSON-443913206 tempest-ServerPasswordTestJSON-443913206-project-member] Lock "951c225b-d930-449f-81b5-4f28f9dd27e5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.820s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.897271] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508808, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.061693] env[69475]: DEBUG nova.network.neutron [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.081413] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3eb4a8b0-960f-484d-8e62-cc7784bff959 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.093260] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6a9c2c-1dde-4f86-8078-d0746c06f6d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.124305] env[69475]: DEBUG nova.compute.manager [req-59a624a5-a0d1-497e-af97-f01d4b1232ad req-21ce851e-a0a5-482c-a7ae-c6a9a8ae59dc service nova] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Detach interface failed, port_id=20ebbb40-d77a-4c82-8038-5b810eb735db, reason: Instance 226afd68-34d8-482e-89f9-0c45a300a803 could not be found. 
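The network-vif-deleted event above ends with the compute manager rewriting the instance's cached network_info without the deleted port, which is why the cache is then logged as "[]". A tiny illustrative helper (not Nova's code) showing that reduction:

    def drop_deleted_vif(network_info, port_id):
        """Return the cached network_info without the port Neutron just deleted."""
        return [vif for vif in network_info if vif.get("id") != port_id]

    cache = [{"id": "20ebbb40-d77a-4c82-8038-5b810eb735db", "type": "ovs"}]
    print(drop_deleted_vif(cache, "20ebbb40-d77a-4c82-8038-5b810eb735db"))  # -> []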
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1072.137439] env[69475]: INFO nova.compute.resource_tracker [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating resource usage from migration 4fac22fe-611e-4785-b07c-06b01264b8f7 [ 1072.366290] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3919b94-7781-418d-81e9-f706e53184cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.374424] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025e4f82-3117-465f-98ec-d9b352751de9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.407349] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4700c040-3b82-4085-a497-4255f6eec218 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.416494] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508808, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.417702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-786c6818-ac0e-4659-b6f8-9ea298f465de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.431690] env[69475]: DEBUG nova.compute.provider_tree [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1072.564878] env[69475]: INFO nova.compute.manager [-] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Took 1.28 seconds to deallocate network for instance. [ 1072.918573] env[69475]: DEBUG oslo_vmware.api [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508808, 'name': PowerOnVM_Task, 'duration_secs': 1.20022} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.922019] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1072.922019] env[69475]: DEBUG nova.compute.manager [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1072.922019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0321e372-1d6e-4c55-b1e3-2c5c99bc2168 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.936414] env[69475]: DEBUG nova.scheduler.client.report [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1073.072464] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.440025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.442894] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.317s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.443125] env[69475]: INFO nova.compute.manager [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Migrating [ 1073.452769] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.337s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.454335] env[69475]: INFO nova.compute.claims [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1073.970012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "97013703-3506-4441-b80c-cbb5c7e29bdf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.970261] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.970481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "97013703-3506-4441-b80c-cbb5c7e29bdf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.970667] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.970837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1073.979029] env[69475]: INFO nova.compute.manager [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Terminating instance [ 1073.979029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.979029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.979029] env[69475]: DEBUG nova.network.neutron [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1074.151783] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.151783] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.151783] env[69475]: DEBUG nova.objects.instance [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.325499] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.325822] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.488024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "refresh_cache-97013703-3506-4441-b80c-cbb5c7e29bdf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.488024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquired lock "refresh_cache-97013703-3506-4441-b80c-cbb5c7e29bdf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.488024] env[69475]: DEBUG 
nova.network.neutron [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1074.655678] env[69475]: DEBUG nova.objects.instance [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1074.744693] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57164d99-2bb0-45d4-884d-a6c3c21623f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.753294] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fdc7bc0-9e51-4496-8620-dcd1e03f65cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.791911] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a06c16ba-6a84-438b-a6e6-554bf53218fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.799863] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a77e329-b40e-426e-9f6c-ec25d4f48a15 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.813643] env[69475]: DEBUG nova.compute.provider_tree [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1074.832404] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833026] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833026] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833026] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833026] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task 
ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833281] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.833281] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1074.835620] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1074.839418] env[69475]: DEBUG nova.network.neutron [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.005934] env[69475]: DEBUG nova.network.neutron [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1075.059587] env[69475]: DEBUG nova.network.neutron [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.158697] env[69475]: DEBUG nova.objects.base [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance<1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35> lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1075.158916] env[69475]: DEBUG nova.network.neutron [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1075.255020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-07100003-4d71-41ac-a241-65be883c8d22 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.104s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.317504] env[69475]: DEBUG nova.scheduler.client.report [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1075.339498] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.342330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.563082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Releasing lock "refresh_cache-97013703-3506-4441-b80c-cbb5c7e29bdf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 
1075.563082] env[69475]: DEBUG nova.compute.manager [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1075.564205] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1075.564205] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eba0153-50f9-4f0b-b04a-45f48c298421 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.572606] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1075.572904] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da3654b2-add0-4514-a6df-3f91ef126fba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.578676] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1075.578676] env[69475]: value = "task-3508809" [ 1075.578676] env[69475]: _type = "Task" [ 1075.578676] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.586531] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.825711] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.373s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.826247] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1075.828960] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.711s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.829552] env[69475]: DEBUG nova.objects.instance [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'pci_requests' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.089509] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508809, 'name': PowerOffVM_Task, 'duration_secs': 0.118342} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.089783] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1076.089949] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1076.090222] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e975a491-e4e8-472e-9e71-efc543e7d3e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.113251] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1076.113485] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1076.113773] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Deleting the datastore file [datastore1] 97013703-3506-4441-b80c-cbb5c7e29bdf {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1076.113919] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3210d370-f820-471d-92cf-d5a6a4969f1e 
{{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.119548] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for the task: (returnval){ [ 1076.119548] env[69475]: value = "task-3508811" [ 1076.119548] env[69475]: _type = "Task" [ 1076.119548] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.127202] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508811, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.332869] env[69475]: DEBUG nova.compute.utils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1076.337347] env[69475]: DEBUG nova.objects.instance [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'numa_topology' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.337347] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1076.337347] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1076.386246] env[69475]: DEBUG nova.policy [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1dbe602ef9f64662ac728252f5259321', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '572bc56741e24d57a4d01f202c8fb78d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1076.629423] env[69475]: DEBUG oslo_vmware.api [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Task: {'id': task-3508811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101943} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.629970] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1076.630214] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1076.630505] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1076.630773] env[69475]: INFO nova.compute.manager [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1076.631032] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1076.632027] env[69475]: DEBUG nova.compute.manager [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1076.632231] env[69475]: DEBUG nova.network.neutron [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1076.650341] env[69475]: DEBUG nova.network.neutron [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1076.792938] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Successfully created port: e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1076.839558] env[69475]: INFO nova.compute.claims [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1076.842243] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1076.856829] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdf75ae6-f672-463c-bc3e-1b9a2310bea6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.877738] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1077.154555] env[69475]: DEBUG nova.network.neutron [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1077.383994] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1077.384537] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd768627-7b87-41c1-a237-58f7242a0a5d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.392541] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1077.392541] env[69475]: value = "task-3508812" [ 1077.392541] env[69475]: _type = "Task" [ 1077.392541] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.400481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.400712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.401020] env[69475]: DEBUG nova.objects.instance [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1077.402535] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508812, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.657124] env[69475]: INFO nova.compute.manager [-] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Took 1.02 seconds to deallocate network for instance. [ 1077.856454] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1077.882085] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1077.882361] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1077.882541] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1077.882726] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1077.882891] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1077.883056] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1077.883314] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1077.883483] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} 
[ 1077.883712] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1077.883904] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1077.884168] env[69475]: DEBUG nova.virt.hardware [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1077.885125] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea26fafe-73c4-4d92-93bb-3c592ddcb52c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.895330] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c261ca-ea8a-418b-80dc-8f27714b3f29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.916845] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508812, 'name': PowerOffVM_Task, 'duration_secs': 0.223021} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.919550] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.919768] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1077.996418] env[69475]: DEBUG nova.objects.instance [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.105862] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ffed010-3ac2-47d2-aea2-7a7d02eb95b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.113556] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb44a76-9308-4c74-945e-439cdca12577 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.144883] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fcf5ca9-b77e-49bb-a653-93cd14541cb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.152148] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7b13bf-6912-4993-b10e-b694985b22ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.165726] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.166232] env[69475]: DEBUG nova.compute.provider_tree [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.426367] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1078.426621] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1078.426794] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1078.426998] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1078.427167] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1078.427314] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1078.427519] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1078.427676] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1078.427841] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1078.428034] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1078.428222] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1078.433767] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a3b7449-1877-4148-866c-627d12ca0278 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.451075] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1078.451075] env[69475]: value = "task-3508813" [ 1078.451075] env[69475]: _type = "Task" [ 1078.451075] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.458972] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508813, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.500124] env[69475]: DEBUG nova.objects.base [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance<1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35> lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1078.500366] env[69475]: DEBUG nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1078.527391] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Successfully updated port: e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1078.556012] env[69475]: DEBUG nova.policy [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1078.660606] env[69475]: DEBUG nova.compute.manager [req-d771201a-a981-438e-82d7-16c3d9480f56 
req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1078.660840] env[69475]: DEBUG oslo_concurrency.lockutils [req-d771201a-a981-438e-82d7-16c3d9480f56 req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.660967] env[69475]: DEBUG oslo_concurrency.lockutils [req-d771201a-a981-438e-82d7-16c3d9480f56 req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1078.661119] env[69475]: DEBUG oslo_concurrency.lockutils [req-d771201a-a981-438e-82d7-16c3d9480f56 req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.661282] env[69475]: DEBUG nova.compute.manager [req-d771201a-a981-438e-82d7-16c3d9480f56 req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] No waiting events found dispatching network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1078.661444] env[69475]: WARNING nova.compute.manager [req-d771201a-a981-438e-82d7-16c3d9480f56 req-f69922f0-f93c-40c7-bf96-b49612207c00 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received unexpected event network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e for instance with vm_state building and task_state spawning. 
[ 1078.669141] env[69475]: DEBUG nova.scheduler.client.report [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.832528] env[69475]: DEBUG nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Successfully created port: e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.963411] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508813, 'name': ReconfigVM_Task, 'duration_secs': 0.1722} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.963757] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1079.030411] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1079.030411] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1079.030411] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1079.173892] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.345s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 
1079.176871] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.525s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.178779] env[69475]: INFO nova.compute.claims [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.221075] env[69475]: INFO nova.network.neutron [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating port 0241fad0-a699-4ab6-8665-37a808867cd9 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1079.470490] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1079.470890] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1079.471423] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1079.471774] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1079.472054] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1079.472268] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1079.472531] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1079.472947] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1079.473287] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1079.473677] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1079.474068] env[69475]: DEBUG nova.virt.hardware [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1079.479717] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1079.480387] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0e8855a-4382-49e1-b589-d6daf8c49a78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.500284] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1079.500284] env[69475]: value = "task-3508814" [ 1079.500284] env[69475]: _type = "Task" [ 1079.500284] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1079.508341] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508814, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1079.559485] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1079.767026] env[69475]: DEBUG nova.network.neutron [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1080.010051] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508814, 'name': ReconfigVM_Task, 'duration_secs': 0.209329} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.010471] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1080.011354] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d514b17-690a-4105-b4a2-f9cb583e71bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.038130] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfiguring VM instance instance-0000002c to attach disk [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1080.038915] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ac8610f-8136-4fae-95a5-97a1f7763f45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.059925] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1080.059925] env[69475]: value = "task-3508815" [ 1080.059925] env[69475]: _type = "Task" [ 1080.059925] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.068608] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508815, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.229967] env[69475]: DEBUG nova.compute.manager [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-plugged-e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.229967] env[69475]: DEBUG oslo_concurrency.lockutils [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.229967] env[69475]: DEBUG oslo_concurrency.lockutils [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1080.229967] env[69475]: DEBUG oslo_concurrency.lockutils [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.229967] env[69475]: DEBUG nova.compute.manager [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] No waiting events found dispatching network-vif-plugged-e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1080.229967] env[69475]: WARNING nova.compute.manager [req-adcb6f03-fbc5-4436-aa7c-67af09d7bc1c req-a32a2b18-95cc-4c14-9e84-dcde90ef3802 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received unexpected event network-vif-plugged-e60a34f2-9926-41dc-a777-3d0e92f22ce9 for instance with vm_state active and task_state None. 
[ 1080.269040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1080.270118] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance network_info: |[{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1080.273022] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:f8:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e27cfabc-cd13-4aaa-b9e1-eebffb18225e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1080.279460] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.282266] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1080.285474] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4ffcebd-5702-4c3b-ae24-16eb01f257e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.309729] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1080.309729] env[69475]: value = "task-3508816" [ 1080.309729] env[69475]: _type = "Task" [ 1080.309729] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.318248] env[69475]: DEBUG nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Successfully updated port: e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.322434] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508816, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.448964] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf16490-6aaf-4559-b6d5-d6bcdac384bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.455732] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238e9c8b-0a18-4138-95ce-b4521bf999d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.485804] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64c24b60-ba45-4751-9b16-17f53b5495df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.493026] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5462b4-b16e-480b-aff9-c4be1a23b9b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.506541] env[69475]: DEBUG nova.compute.provider_tree [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1080.568264] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508815, 'name': ReconfigVM_Task, 'duration_secs': 0.268883} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.568995] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Reconfigured VM instance instance-0000002c to attach disk [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82/baf27027-678d-4167-bb9b-df410aeb0e82.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1080.569940] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1080.746841] env[69475]: DEBUG nova.compute.manager [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1080.746841] env[69475]: DEBUG nova.compute.manager [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing instance network info cache due to event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1080.747476] env[69475]: DEBUG oslo_concurrency.lockutils [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.747862] env[69475]: DEBUG oslo_concurrency.lockutils [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.748172] env[69475]: DEBUG nova.network.neutron [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1080.822027] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508816, 'name': CreateVM_Task, 'duration_secs': 0.325474} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.822027] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1080.822027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.822027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.822027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1080.822027] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f64193fb-3c4a-42d9-956e-5cd16c05a990 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.823978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1080.824406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1080.824994] env[69475]: DEBUG nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1080.831149] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1080.831149] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5255dbb9-c62a-8467-a101-5c517e190910" [ 1080.831149] env[69475]: _type = "Task" [ 1080.831149] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.841239] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5255dbb9-c62a-8467-a101-5c517e190910, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.010570] env[69475]: DEBUG nova.scheduler.client.report [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.074335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.074335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.074335] env[69475]: DEBUG nova.network.neutron [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.080063] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aff9e95-462f-4e28-a6cb-dbc47066b434 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.111831] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-182528de-be73-4a4c-86d3-6e67990b55ca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.130155] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1081.341099] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5255dbb9-c62a-8467-a101-5c517e190910, 'name': SearchDatastore_Task, 'duration_secs': 0.009348} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.341389] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1081.341617] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1081.341844] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.342287] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.342287] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1081.342495] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-56581aaf-d6aa-457c-8b67-f42457675ae3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.351814] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1081.352010] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1081.352755] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-326e47ee-4d79-4eb0-ac3a-d2aaff12aba7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.357506] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1081.357506] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bf13a2-2c3b-e544-f73e-a88c34d16d14" [ 1081.357506] env[69475]: _type = "Task" [ 1081.357506] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.364811] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bf13a2-2c3b-e544-f73e-a88c34d16d14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.378381] env[69475]: WARNING nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. ignoring it [ 1081.461391] env[69475]: DEBUG nova.network.neutron [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updated VIF entry in instance network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.461391] env[69475]: DEBUG nova.network.neutron [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.516918] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.517472] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.520129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.363s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.521923] env[69475]: INFO nova.compute.claims [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.699976] env[69475]: DEBUG nova.network.neutron [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Port 4059da75-efc8-42ee-90b1-8202220d1621 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1081.717551] env[69475]: DEBUG nova.network.neutron [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "address": "fa:16:3e:1b:e3:de", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60a34f2-99", "ovs_interfaceid": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.876734] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52bf13a2-2c3b-e544-f73e-a88c34d16d14, 'name': SearchDatastore_Task, 'duration_secs': 0.008525} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1081.876734] env[69475]: DEBUG nova.network.neutron [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.881046] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b23a9148-df49-4b92-94d5-24f04c8d1fe3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.882988] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1081.882988] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ad16f-d0bf-d3a1-71e7-eb28d883e5cf" [ 1081.882988] env[69475]: _type = "Task" [ 1081.882988] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1081.893226] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ad16f-d0bf-d3a1-71e7-eb28d883e5cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1081.962528] env[69475]: DEBUG oslo_concurrency.lockutils [req-2565c099-e94b-4d00-b407-bc8e735fc77c req-2f6ed2a6-8b98-4dff-92fc-119cc5f36b6f service nova] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.026505] env[69475]: DEBUG nova.compute.utils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.031720] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1082.031918] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.092486] env[69475]: DEBUG nova.policy [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a007f7a31e5a4c0eb07bd8bf5d26cf2c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a68f54aa603f46468f50c83cd4fa3e8c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.221207] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.222644] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.225353] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.226264] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77082baa-217a-4723-b3b6-c43df9c873f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.247044] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.247189] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1082.247352] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1082.247563] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1082.247741] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1082.247916] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1082.248223] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1082.248439] env[69475]: DEBUG nova.virt.hardware [None 
req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1082.248642] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1082.248850] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1082.249068] env[69475]: DEBUG nova.virt.hardware [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1082.256090] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfiguring VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1082.256453] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62fa0e12-5e12-4e52-aaa2-f05330b75868 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.271859] env[69475]: DEBUG nova.compute.manager [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-changed-e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.272101] env[69475]: DEBUG nova.compute.manager [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing instance network info cache due to event network-changed-e60a34f2-9926-41dc-a777-3d0e92f22ce9. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1082.272356] env[69475]: DEBUG oslo_concurrency.lockutils [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.272583] env[69475]: DEBUG oslo_concurrency.lockutils [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.272891] env[69475]: DEBUG nova.network.neutron [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing network info cache for port e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1082.279539] env[69475]: DEBUG oslo_vmware.api [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1082.279539] env[69475]: value = "task-3508817" [ 1082.279539] env[69475]: _type = "Task" [ 1082.279539] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.289299] env[69475]: DEBUG oslo_vmware.api [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508817, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.380684] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.394441] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528ad16f-d0bf-d3a1-71e7-eb28d883e5cf, 'name': SearchDatastore_Task, 'duration_secs': 0.010729} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1082.394768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.395133] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1082.395448] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-55da9fa7-d7ab-4cad-95a6-16565c4dac05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.400737] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Successfully created port: b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.406251] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='7c0e90fd63bf1d316dab58a83285fecc',container_format='bare',created_at=2025-04-22T09:42:55Z,direct_url=,disk_format='vmdk',id=a9351d13-720c-49e6-a8e9-3fac7da2b98a,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1497566829-shelved',owner='d25a22195d0c4370a481a242a18f430a',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2025-04-22T09:43:11Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1082.406546] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1082.407192] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1082.407452] env[69475]: DEBUG nova.virt.hardware [None 
req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1082.407641] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1082.407923] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1082.408204] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1082.408404] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1082.408643] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1082.408826] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1082.409161] env[69475]: DEBUG nova.virt.hardware [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1082.410327] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fee1b42a-9cee-4c70-a909-05d1ebacc5e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.414039] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1082.414039] env[69475]: value = "task-3508818" [ 1082.414039] env[69475]: _type = "Task" [ 1082.414039] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.421450] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d003dde-2fae-469a-88af-3e2c3470bdc8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.428961] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508818, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.441749] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:27:a0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0549820d-5649-40bc-ad6e-9ae27b384d90', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0241fad0-a699-4ab6-8665-37a808867cd9', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.449818] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.450159] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.450960] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3bcea787-648f-4393-bcfc-b03b30a86f74 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.469576] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.469576] env[69475]: value = "task-3508819" [ 1082.469576] env[69475]: _type = "Task" [ 1082.469576] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.480351] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508819, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.535442] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1082.727945] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.728353] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.728535] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.793479] env[69475]: DEBUG oslo_vmware.api [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.862072] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94990ebc-36d8-4ccb-ad21-f29617aa4272 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.874170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e15d6af-8ca4-4348-a137-04a00865f149 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.911646] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb3dca4-92a9-479c-907a-4440af566fd1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.924316] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cfa767-dfc5-4e99-9aba-e8ff677f6ede {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.932036] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508818, 'name': CopyVirtualDisk_Task} progress is 89%. 
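Note on the "Acquiring lock ... acquired ... released" triples above: this is the oslo.concurrency pattern Nova uses around per-instance event bookkeeping. A minimal sketch of the same pattern, assuming only the public lockutils.lock() context manager; the function body is illustrative, not Nova's implementation:

    from oslo_concurrency import lockutils

    # The "<instance-uuid>-events" name mirrors the lock names seen in the log.
    def clear_events_for_instance(instance_uuid, events_by_instance):
        with lockutils.lock(f"{instance_uuid}-events"):
            # Drop any queued external events for this instance while holding
            # the per-instance events lock, as _clear_events does above.
            return events_by_instance.pop(instance_uuid, {})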
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.937342] env[69475]: DEBUG nova.compute.manager [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.937342] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.937342] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1082.937342] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1082.937342] env[69475]: DEBUG nova.compute.manager [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] No waiting events found dispatching network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1082.937342] env[69475]: WARNING nova.compute.manager [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received unexpected event network-vif-plugged-0241fad0-a699-4ab6-8665-37a808867cd9 for instance with vm_state shelved_offloaded and task_state spawning. [ 1082.937342] env[69475]: DEBUG nova.compute.manager [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1082.937812] env[69475]: DEBUG nova.compute.manager [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing instance network info cache due to event network-changed-0241fad0-a699-4ab6-8665-37a808867cd9. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1082.937812] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.938172] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.938172] env[69475]: DEBUG nova.network.neutron [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Refreshing network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1082.953559] env[69475]: DEBUG nova.compute.provider_tree [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.980249] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508819, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.061054] env[69475]: DEBUG nova.network.neutron [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updated VIF entry in instance network info cache for port e60a34f2-9926-41dc-a777-3d0e92f22ce9. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.061587] env[69475]: DEBUG nova.network.neutron [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "address": "fa:16:3e:1b:e3:de", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60a34f2-99", "ovs_interfaceid": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.108618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.108618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock 
"74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.291475] env[69475]: DEBUG oslo_vmware.api [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508817, 'name': ReconfigVM_Task, 'duration_secs': 0.631548} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.292211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.292395] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfigured VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1083.426318] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508818, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538693} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.426674] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1083.426906] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1083.427194] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a73f6c74-ae20-49d8-8755-7c1498c5a724 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.433288] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1083.433288] env[69475]: value = "task-3508820" [ 1083.433288] env[69475]: _type = "Task" [ 1083.433288] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.442630] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508820, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.460910] env[69475]: DEBUG nova.scheduler.client.report [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.474607] env[69475]: DEBUG nova.network.neutron [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updated VIF entry in instance network info cache for port 0241fad0-a699-4ab6-8665-37a808867cd9. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1083.475212] env[69475]: DEBUG nova.network.neutron [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.483121] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508819, 'name': CreateVM_Task, 'duration_secs': 0.787948} completed successfully. 
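Note on the inventory record above: placement treats usable capacity as roughly (total - reserved) * allocation_ratio, so the figures reported for provider dd221100-68c1-4a75-92b5-b24d81fee5da work out as below (a worked example, not scheduler code):

    # Inventory reported for the provider in the log above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    # Usable capacity is roughly (total - reserved) * allocation_ratio.
    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:g}")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400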
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.484116] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.484724] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.484885] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.485268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.485716] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b2eb972-0656-4bd7-9dfe-122dd899f58f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.490349] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1083.490349] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52067ee8-b0c2-5d01-ea7f-6b42423520f1" [ 1083.490349] env[69475]: _type = "Task" [ 1083.490349] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.497967] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52067ee8-b0c2-5d01-ea7f-6b42423520f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.547063] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.564358] env[69475]: DEBUG oslo_concurrency.lockutils [req-0d7cec62-20c1-4206-ad92-a9e7e6626268 req-17280663-2224-4d94-bc1a-f0a68c7237a7 service nova] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.573213] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1083.573516] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1083.573622] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1083.573799] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1083.573937] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1083.574138] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1083.574355] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:583}} [ 1083.574508] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1083.574668] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1083.578027] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1083.578027] env[69475]: DEBUG nova.virt.hardware [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1083.578027] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc42df9-360f-4622-b4bc-622091882dbb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.583481] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6786ab-9396-4da9-ae41-3943a6242f3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.612659] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1083.761958] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.762167] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.762353] env[69475]: DEBUG nova.network.neutron [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1083.800258] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d41a4646-e4d9-47bd-8254-e0aa9e24fcf5 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.399s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.943851] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064808} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.944205] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1083.945324] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582618c5-e546-4a9a-a0fe-8c1baa6e08f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.967147] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1083.967428] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6a00bde-1e7a-42ce-858f-008cb36132cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.981994] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.982521] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1083.985137] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecd783c3-3a7e-4c89-a582-ab99795d8696 req-d9c503cf-aa94-4939-a1d5-d579a25ae273 service nova] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.985799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.056s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.987211] env[69475]: INFO nova.compute.claims [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1083.997138] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1083.997138] env[69475]: value = "task-3508821" [ 1083.997138] env[69475]: _type = "Task" [ 1083.997138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.001808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.002047] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Processing image a9351d13-720c-49e6-a8e9-3fac7da2b98a {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1084.002279] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.002416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.002612] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] 
Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1084.002844] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-26cdd998-7e55-4ffb-b0f9-e74064b8c40b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.009443] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508821, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.011757] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1084.011937] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1084.012662] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae0ffdce-580c-43bd-b080-93400faf6858 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.018182] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1084.018182] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524a7d25-ff00-c7fc-2737-16743315f2c5" [ 1084.018182] env[69475]: _type = "Task" [ 1084.018182] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.026696] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524a7d25-ff00-c7fc-2737-16743315f2c5, 'name': SearchDatastore_Task} progress is 0%. 
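Note on the image-cache records above (acquire the cache lock, MakeDirectory, SearchDatastore_Task): the shape of the flow is check-then-fetch behind a named lock. A rough sketch under stated assumptions follows; 'download' is a hypothetical callable and local filesystem calls stand in for the datastore operations:

    import os
    from oslo_concurrency import lockutils

    def fetch_image_if_missing(cache_dir, image_id, download):
        # Generic check-then-fetch behind a named lock, similar in spirit to
        # the _fetch_image_if_missing flow above; illustration only.
        cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
        with lockutils.lock(f"image-cache-{image_id}"):
            if not os.path.exists(cached):                           # SearchDatastore_Task
                os.makedirs(os.path.dirname(cached), exist_ok=True)  # FileManager.MakeDirectory
                download(cached)                                     # stream the image in
        return cached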
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.142751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1084.217069] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Successfully updated port: b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.487271] env[69475]: DEBUG nova.compute.utils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1084.489177] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1084.489357] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1084.508706] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508821, 'name': ReconfigVM_Task, 'duration_secs': 0.290177} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.508963] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Reconfigured VM instance instance-00000068 to attach disk [datastore1] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1084.509563] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e377042-387f-40d0-8eca-320243c5a809 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.515937] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1084.515937] env[69475]: value = "task-3508822" [ 1084.515937] env[69475]: _type = "Task" [ 1084.515937] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.525837] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508822, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.528974] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1084.529209] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Fetch image to [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3/OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1084.529390] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Downloading stream optimized image a9351d13-720c-49e6-a8e9-3fac7da2b98a to [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3/OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3.vmdk on the data store datastore1 as vApp {{(pid=69475) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1084.529554] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Downloading image file data a9351d13-720c-49e6-a8e9-3fac7da2b98a to the ESX as VM named 'OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3' {{(pid=69475) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1084.573682] env[69475]: DEBUG nova.policy [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e08b59cbbeb3415c87bdbffdefbce422', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79b83e6128844b2eae71ecc046ff483f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.584272] env[69475]: DEBUG nova.network.neutron [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", 
"bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.611956] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1084.611956] env[69475]: value = "resgroup-9" [ 1084.611956] env[69475]: _type = "ResourcePool" [ 1084.611956] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1084.612259] env[69475]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-d373146c-e8e7-4abe-8622-c80654e47986 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.636535] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lease: (returnval){ [ 1084.636535] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1084.636535] env[69475]: _type = "HttpNfcLease" [ 1084.636535] env[69475]: } obtained for vApp import into resource pool (val){ [ 1084.636535] env[69475]: value = "resgroup-9" [ 1084.636535] env[69475]: _type = "ResourcePool" [ 1084.636535] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1084.636816] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the lease: (returnval){ [ 1084.636816] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1084.636816] env[69475]: _type = "HttpNfcLease" [ 1084.636816] env[69475]: } to be ready. 
{{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1084.643156] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1084.643156] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1084.643156] env[69475]: _type = "HttpNfcLease" [ 1084.643156] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1084.719914] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.720212] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.723475] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1084.996503] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1085.029032] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508822, 'name': Rename_Task, 'duration_secs': 0.142353} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.029130] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1085.029339] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e0edbcd3-10ae-4456-b3bf-12898f785ffe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.033447] env[69475]: DEBUG nova.compute.manager [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-vif-plugged-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.033672] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.033880] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.034163] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.034272] env[69475]: DEBUG nova.compute.manager [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] No waiting events found dispatching network-vif-plugged-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.034516] env[69475]: WARNING nova.compute.manager [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received unexpected event network-vif-plugged-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 for instance with vm_state building and task_state spawning. 
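Note on the external-event records above ("No waiting events found dispatching ..." followed by "Received unexpected event ..."): the compute manager pops a waiting event if one was registered, otherwise it only warns. A toy model of that pop-or-warn behaviour, with plain threading primitives standing in for Nova's internals:

    import threading

    class InstanceEvents:
        # Toy model: if nothing is waiting for a neutron event, it is logged
        # as unexpected, as in the WARNING records above.
        def __init__(self):
            self._waiting = {}             # (instance_uuid, event_name) -> threading.Event
            self._lock = threading.Lock()  # stands in for the "<uuid>-events" lock

        def expect(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiting[(instance_uuid, event_name)] = threading.Event()
            return ev                      # a spawning thread would wait() on this

        def pop_instance_event(self, instance_uuid, event_name):
            with self._lock:
                ev = self._waiting.pop((instance_uuid, event_name), None)
            if ev is None:
                print(f"WARNING: received unexpected event {event_name} for {instance_uuid}")
            else:
                ev.set()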
[ 1085.034608] env[69475]: DEBUG nova.compute.manager [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.034793] env[69475]: DEBUG nova.compute.manager [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing instance network info cache due to event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1085.034932] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.036696] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1085.036696] env[69475]: value = "task-3508824" [ 1085.036696] env[69475]: _type = "Task" [ 1085.036696] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.045358] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508824, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.088456] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.149493] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1085.149493] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1085.149493] env[69475]: _type = "HttpNfcLease" [ 1085.149493] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1085.213508] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Successfully created port: 25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.293753] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd2e013-3e45-4137-afd9-cad908deaf20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.301573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb9c67aa-eb62-480d-b7b1-63674b9c286d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.307129] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.337733] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80b28fc5-c8a9-4e44-bf66-4ecf4c5c1058 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.347514] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-086279f4-4650-43ba-82ba-2f3c1dc0cd4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.360791] env[69475]: DEBUG nova.compute.provider_tree [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.547302] env[69475]: DEBUG oslo_vmware.api [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508824, 'name': PowerOnVM_Task, 'duration_secs': 0.448533} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.547568] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1085.547767] env[69475]: INFO nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Took 7.69 seconds to spawn the instance on the hypervisor. 
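Note on the task records above ("Waiting for the task ... to complete", "progress is N%", "duration_secs ... completed successfully"): they come from polling the vCenter task object until it reaches a terminal state. A generic model of that loop, assuming a hypothetical get_task_info callable rather than the oslo.vmware implementation:

    import time

    class TaskFailed(Exception):
        pass

    def wait_for_task(get_task_info, poll_interval=0.5):
        # get_task_info is a hypothetical callable returning an object with
        # .state ('running', 'success' or 'error'), .progress and .error fields.
        while True:
            info = get_task_info()
            if info.state == "success":
                return info
            if info.state == "error":
                raise TaskFailed(info.error)
            print(f"progress is {info.progress}%")   # mirrors the DEBUG lines above
            time.sleep(poll_interval)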
[ 1085.547951] env[69475]: DEBUG nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1085.548773] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-376a0693-0fcf-4ed0-87cc-07c9fffbe2bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.586892] env[69475]: DEBUG nova.network.neutron [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.614856] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8019fc-76ce-4078-9ff2-e1c93dad968b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.643067] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f381a101-cf8e-4b20-afa1-b673028d9419 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.649536] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1085.649536] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1085.649536] env[69475]: _type = "HttpNfcLease" [ 1085.649536] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1085.651831] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1085.651831] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52201df1-eaa9-58e9-4930-280114ed4b79" [ 1085.651831] env[69475]: _type = "HttpNfcLease" [ 1085.651831] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1085.652234] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1085.656156] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ed031f-f47f-4d59-bd1c-3d433c3efbaf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.663561] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1085.663743] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk. 
{{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1085.727580] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-430a6756-3ec3-47eb-b90a-42b666762a62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.863913] env[69475]: DEBUG nova.scheduler.client.report [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.875297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-9d4929eb-bec9-43f8-9341-df239fb9a0a7" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.875532] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-9d4929eb-bec9-43f8-9341-df239fb9a0a7" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.875895] env[69475]: DEBUG nova.objects.instance [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.008382] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1086.034458] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1086.034709] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1086.034978] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1086.035056] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1086.035193] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1086.035341] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1086.035544] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1086.035698] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1086.035876] env[69475]: DEBUG 
nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1086.036861] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1086.036861] env[69475]: DEBUG nova.virt.hardware [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1086.037722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a47715e-ac91-420f-85ec-2a37ba53d289 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.050929] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a152343-4691-4d91-aa59-933a95a34198 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.076123] env[69475]: INFO nova.compute.manager [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Took 18.98 seconds to build instance. 
[ 1086.089297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.090193] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Instance network_info: |[{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.090193] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.090789] env[69475]: DEBUG nova.network.neutron [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.092571] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:52:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '84aee122-f630-43c5-9cc1-3a38d3819c82', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2b04f22-0a1e-4c90-b84f-5d119fc7e528', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.101674] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.109834] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.115504] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8ce722eb-c964-4cde-bdb8-ad67efe38550 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.142590] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.142590] env[69475]: value = "task-3508825" [ 1086.142590] env[69475]: _type = "Task" [ 1086.142590] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.156772] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508825, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.160607] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1086.160909] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2fcc3eb0-f3cd-4d81-b612-a2b125e9cf74 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.168639] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1086.168639] env[69475]: value = "task-3508826" [ 1086.168639] env[69475]: _type = "Task" [ 1086.168639] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.185162] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508826, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.369012] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.369490] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1086.372271] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.111s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.373639] env[69475]: INFO nova.compute.claims [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1086.445087] env[69475]: DEBUG nova.network.neutron [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updated VIF entry in instance network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1086.445570] env[69475]: DEBUG nova.network.neutron [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1086.508619] env[69475]: DEBUG nova.objects.instance [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1086.578398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-36219a49-3e6f-42ed-963d-6a23eac0df48 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.496s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.615213] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "20b37e69-5870-4f63-aeba-9293615da478" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.615680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.615873] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "20b37e69-5870-4f63-aeba-9293615da478-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1086.616558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1086.616558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.619193] env[69475]: INFO nova.compute.manager [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Terminating instance [ 1086.659008] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508825, 'name': CreateVM_Task, 'duration_secs': 0.474601} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.661337] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1086.662334] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.662532] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.663725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1086.663725] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7eaba88-9191-491b-acc4-be6bd21f542a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.670016] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1086.670016] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268fae7-649e-78fc-8961-b1b548871cfe" [ 1086.670016] env[69475]: _type = "Task" [ 1086.670016] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.695123] env[69475]: DEBUG oslo_vmware.api [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508826, 'name': PowerOnVM_Task, 'duration_secs': 0.436392} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.695484] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268fae7-649e-78fc-8961-b1b548871cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.016989} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.695792] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1086.696052] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e26bda99-9bd5-467d-84ff-be3aaeba5fd4 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance 'baf27027-678d-4167-bb9b-df410aeb0e82' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1086.701827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.701827] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1086.701827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.702078] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.702244] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1086.702849] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9b4e08d-54a4-425a-8455-32b366a3d267 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.712897] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1086.713169] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1086.714246] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff9afc19-3c8e-4aa0-b282-32fd920271a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.721357] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1086.721357] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f06e4-38d6-51b0-cfbb-dc8dbe441a13" [ 1086.721357] env[69475]: _type = "Task" [ 1086.721357] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.729818] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f06e4-38d6-51b0-cfbb-dc8dbe441a13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.882426] env[69475]: DEBUG nova.compute.utils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1086.884757] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1086.889038] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1086.948765] env[69475]: DEBUG oslo_concurrency.lockutils [req-5bd5a4a3-993a-49dc-8138-9ab85717aa46 req-a05b6bf0-5e9c-4d4e-ad9d-48ac249b839f service nova] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.954526] env[69475]: DEBUG nova.policy [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a123051be3624b50ab42a4254f687767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca5098b4aae94c08b3f8ffd66aae2e2c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1086.975954] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1086.976242] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1086.977309] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f8ea132-7aaf-4ac8-bee4-962ffd13352c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.984911] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1086.985082] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1086.985319] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-84450cf6-16b0-43b3-8066-774f77fc5383 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.013235] env[69475]: DEBUG nova.objects.base [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance<1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35> lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1087.013524] env[69475]: DEBUG nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1087.125996] env[69475]: DEBUG nova.compute.manager [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1087.126803] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1087.127692] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93243830-5e63-4151-92b3-97a9e561a10d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.139566] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1087.139772] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-408b312e-589a-4366-96f1-e947df8edffb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.146719] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1087.146719] env[69475]: value = "task-3508827" [ 1087.146719] env[69475]: _type = "Task" [ 1087.146719] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.155895] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508827, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.158385] env[69475]: DEBUG nova.policy [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1087.237304] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f06e4-38d6-51b0-cfbb-dc8dbe441a13, 'name': SearchDatastore_Task, 'duration_secs': 0.018966} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.238746] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef306e0f-9bba-4091-bdd9-0a98fd1923b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.246101] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1087.246101] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52094a5c-74d9-cb06-d906-415fde95b19f" [ 1087.246101] env[69475]: _type = "Task" [ 1087.246101] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.256611] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52094a5c-74d9-cb06-d906-415fde95b19f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.274802] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Successfully created port: 81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1087.382134] env[69475]: DEBUG nova.compute.manager [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Received event network-vif-plugged-25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.383910] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] Acquiring lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.383910] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.383910] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.383910] env[69475]: DEBUG nova.compute.manager [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] No waiting events found dispatching network-vif-plugged-25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1087.383910] env[69475]: WARNING nova.compute.manager [req-f3cf55cc-e52f-49d7-be98-81fadeda410c req-b575db45-0069-4a1e-88a2-49d8a2729766 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Received unexpected event network-vif-plugged-25bf5361-8992-425f-8d46-f45064536466 for instance with vm_state building and task_state spawning. [ 1087.400736] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1087.576882] env[69475]: DEBUG oslo_vmware.rw_handles [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52149242-28d5-1aea-c9cc-6e944d8afee3/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1087.580021] env[69475]: INFO nova.virt.vmwareapi.images [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Downloaded image file data a9351d13-720c-49e6-a8e9-3fac7da2b98a [ 1087.580021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8942ff4a-b1cd-4654-a180-868233344aa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.595730] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8c855ffb-905c-4b9e-ab0b-37bcbc05d2bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.624826] env[69475]: INFO nova.virt.vmwareapi.images [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] The imported VM was unregistered [ 1087.627542] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1087.627542] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Creating directory with path [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1087.627776] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87890294-36ba-4e7e-90d6-64972f2e9fd0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.642382] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Created directory with path [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1087.642650] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3/OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3.vmdk to [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk. 
{{(pid=69475) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1087.642940] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2ed0b66d-b400-45b4-a471-0a72e5bae688 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.654892] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1087.654892] env[69475]: value = "task-3508829" [ 1087.654892] env[69475]: _type = "Task" [ 1087.654892] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.661965] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508827, 'name': PowerOffVM_Task, 'duration_secs': 0.409397} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.662643] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1087.663161] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1087.663161] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c85c02b6-5794-4227-a9d3-6bf987adb816 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.667565] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.711618] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57cf157a-7594-4387-b7e4-94732bd4f2be {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.719396] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb007d74-aad5-4810-a0df-f50039f3d7f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.753786] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029ac36a-cd56-4126-9619-ea3886b0daf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.756278] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1087.756526] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1087.756757] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleting the datastore file [datastore2] 20b37e69-5870-4f63-aeba-9293615da478 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1087.757219] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9fbe18b1-6f19-4e40-b6df-5b1da5e8f87b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.765952] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52094a5c-74d9-cb06-d906-415fde95b19f, 'name': SearchDatastore_Task, 'duration_secs': 0.01263} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.767966] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2208904-20ff-4511-9e16-c72d9f9a8992 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.771877] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.772189] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/92020fc6-aff6-437f-9e26-a5b61ea7e76f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1087.772531] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1087.772531] env[69475]: value = "task-3508831" [ 1087.772531] env[69475]: _type = "Task" [ 1087.772531] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.773478] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de50290b-6d30-4717-9e48-c32b15e418e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.788066] env[69475]: DEBUG nova.compute.provider_tree [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.791124] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1087.791124] env[69475]: value = "task-3508832" [ 1087.791124] env[69475]: _type = "Task" [ 1087.791124] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.794274] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508831, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.802210] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508832, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.921314] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Successfully updated port: 25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.165861] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 12%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.287017] env[69475]: DEBUG oslo_vmware.api [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33215} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.288470] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1088.288658] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1088.288828] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1088.288991] env[69475]: INFO nova.compute.manager [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1088.289244] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.290323] env[69475]: DEBUG nova.compute.manager [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Received event network-changed-25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1088.292023] env[69475]: DEBUG nova.compute.manager [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Refreshing instance network info cache due to event network-changed-25bf5361-8992-425f-8d46-f45064536466. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1088.292023] env[69475]: DEBUG oslo_concurrency.lockutils [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] Acquiring lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.292023] env[69475]: DEBUG oslo_concurrency.lockutils [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] Acquired lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.292023] env[69475]: DEBUG nova.network.neutron [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Refreshing network info cache for port 25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.292234] env[69475]: DEBUG nova.compute.manager [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1088.292234] env[69475]: DEBUG nova.network.neutron [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1088.294820] env[69475]: DEBUG nova.scheduler.client.report [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1088.309349] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508832, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.412527] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1088.425709] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.448337] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1088.448465] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1088.448608] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1088.448787] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1088.448928] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1088.449147] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 
1088.449406] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1088.449534] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1088.449700] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1088.449867] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1088.450208] env[69475]: DEBUG nova.virt.hardware [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1088.451451] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee5342d-1b71-498e-b942-0610bc7b384c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.459613] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703d703f-0295-4109-b275-9532c92b4f32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.668630] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 32%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.803959] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.804858] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1088.810935] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.739s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.811325] env[69475]: DEBUG nova.objects.instance [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'resources' on Instance uuid 226afd68-34d8-482e-89f9-0c45a300a803 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.821258] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508832, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.860100] env[69475]: DEBUG nova.network.neutron [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1088.937393] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Successfully updated port: 81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1088.980614] env[69475]: DEBUG nova.network.neutron [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.053467] env[69475]: DEBUG nova.compute.manager [req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Received event network-vif-plugged-81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.053467] env[69475]: DEBUG oslo_concurrency.lockutils [req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.053822] env[69475]: DEBUG oslo_concurrency.lockutils [req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.053822] env[69475]: DEBUG oslo_concurrency.lockutils 
[req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.054679] env[69475]: DEBUG nova.compute.manager [req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] No waiting events found dispatching network-vif-plugged-81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1089.056121] env[69475]: WARNING nova.compute.manager [req-42ae838b-48fe-4ab4-a735-ee84e02a1c8c req-de46e600-7d41-4211-b685-e19311d86e2b service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Received unexpected event network-vif-plugged-81121438-ec92-4519-97f1-e2a871109623 for instance with vm_state building and task_state spawning. [ 1089.167806] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.284309] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.284594] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.284784] env[69475]: DEBUG nova.compute.manager [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Going to confirm migration 6 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1089.315531] env[69475]: DEBUG nova.compute.utils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1089.320388] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508832, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.322180] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1089.322180] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1089.341594] env[69475]: DEBUG nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Successfully updated port: 9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1089.382240] env[69475]: DEBUG nova.policy [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1089.444490] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.445706] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.445706] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.473619] env[69475]: DEBUG nova.network.neutron [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.484768] env[69475]: DEBUG oslo_concurrency.lockutils [req-311ce3b4-b5fa-47b5-8974-99a1451b6ce4 req-34d3cd41-d341-42b6-b93d-df6739c3a197 service nova] Releasing lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.485282] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquired lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.485459] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.665658] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c8aa1f-77ec-48f9-9c96-9fd2d7fe0adc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.682836] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.684015] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96fe69a-4c52-44e2-972d-bff2e80c6db3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.717637] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93284ad-1d85-4f1a-b67c-94010a55c595 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.729017] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a057407-58ad-4567-bc64-e5c53386ac62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.745785] env[69475]: DEBUG nova.compute.provider_tree [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.762215] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Successfully created port: cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1089.816546] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508832, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.664687} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.816917] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/92020fc6-aff6-437f-9e26-a5b61ea7e76f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1089.817163] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1089.817438] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-513da736-6fbb-49e2-900b-406afcb0f3fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.821550] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1089.826656] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1089.826656] env[69475]: value = "task-3508833" [ 1089.826656] env[69475]: _type = "Task" [ 1089.826656] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.839218] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508833, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.846141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.846430] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.846624] env[69475]: DEBUG nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.856844] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.857112] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.857287] env[69475]: DEBUG nova.network.neutron [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1089.857470] env[69475]: DEBUG nova.objects.instance [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'info_cache' on Instance uuid baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.863625] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.863847] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.977253] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1089.979605] env[69475]: INFO nova.compute.manager [-] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Took 1.69 seconds to deallocate network for instance. [ 1090.045706] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1090.174716] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.220804] env[69475]: DEBUG nova.network.neutron [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.249140] env[69475]: DEBUG nova.scheduler.client.report [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1090.253711] env[69475]: DEBUG nova.network.neutron [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Updating instance_info_cache with network_info: [{"id": "25bf5361-8992-425f-8d46-f45064536466", "address": "fa:16:3e:41:2c:68", "network": {"id": "0ea19f33-032e-4901-9bb9-96881388110d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-825497652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79b83e6128844b2eae71ecc046ff483f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25bf5361-89", "ovs_interfaceid": "25bf5361-8992-425f-8d46-f45064536466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.340192] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508833, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.227376} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.340588] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1090.341494] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-693ed0ea-e34c-4189-879c-2ea310fdc7e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.369111] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/92020fc6-aff6-437f-9e26-a5b61ea7e76f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1090.373086] env[69475]: DEBUG nova.compute.utils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.373709] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d75d59a1-e852-4461-92fb-62685c8d86fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.399029] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1090.399029] env[69475]: value = "task-3508834" [ 1090.399029] env[69475]: _type = "Task" [ 1090.399029] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.410238] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508834, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.426114] env[69475]: WARNING nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. ignoring it [ 1090.426320] env[69475]: WARNING nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. 
ignoring it [ 1090.486378] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.668415] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.723974] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.724218] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Instance network_info: |[{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1090.724671] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:98:9c:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81121438-ec92-4519-97f1-e2a871109623', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.732223] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.732748] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.733260] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1fc07d80-d303-4bed-b90d-ac1b828059a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.752523] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.752523] env[69475]: value = "task-3508835" [ 1090.752523] env[69475]: _type = "Task" [ 1090.752523] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.756406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.758564] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Releasing lock "refresh_cache-60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.758889] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance network_info: |[{"id": "25bf5361-8992-425f-8d46-f45064536466", "address": "fa:16:3e:41:2c:68", "network": {"id": "0ea19f33-032e-4901-9bb9-96881388110d", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-825497652-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "79b83e6128844b2eae71ecc046ff483f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "61a172ee-af3f-473e-b12a-3fee5bf39c8d", "external-id": "nsx-vlan-transportzone-997", "segmentation_id": 997, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap25bf5361-89", "ovs_interfaceid": "25bf5361-8992-425f-8d46-f45064536466", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1090.759436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 
tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.320s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.759612] env[69475]: DEBUG nova.objects.instance [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1090.762095] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:2c:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '61a172ee-af3f-473e-b12a-3fee5bf39c8d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '25bf5361-8992-425f-8d46-f45064536466', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1090.770022] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Creating folder: Project (79b83e6128844b2eae71ecc046ff483f). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1090.773426] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9a60dd3b-0643-4a4d-8aa2-df8992ae7fa8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.779365] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508835, 'name': CreateVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.783181] env[69475]: INFO nova.scheduler.client.report [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance 226afd68-34d8-482e-89f9-0c45a300a803 [ 1090.790655] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Created folder: Project (79b83e6128844b2eae71ecc046ff483f) in parent group-v700823. [ 1090.790899] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Creating folder: Instances. Parent ref: group-v701113. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1090.792515] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6da0e121-102e-499e-bcfe-6b021ca27442 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.800994] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Created folder: Instances in parent group-v701113. [ 1090.801382] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1090.802089] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1090.802312] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-533b04e2-948c-4049-82f9-f4b9f527873c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.821688] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1090.821688] env[69475]: value = "task-3508838" [ 1090.821688] env[69475]: _type = "Task" [ 1090.821688] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.834431] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1090.836585] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508838, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.864686] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1090.864997] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1090.865267] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1090.865456] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1090.865744] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1090.865948] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1090.866227] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1090.866499] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1090.866644] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 
tempest-ServersTestJSON-711529090-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1090.866813] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1090.867094] env[69475]: DEBUG nova.virt.hardware [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1090.867989] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40f618d2-e616-4d8f-a86a-4889ccc74734 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.874354] env[69475]: DEBUG nova.network.neutron [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "address": "fa:16:3e:1b:e3:de", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", 
"segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60a34f2-99", "ovs_interfaceid": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "address": "fa:16:3e:8b:2b:05", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4929eb-be", "ovs_interfaceid": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.881970] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab08e6f4-d740-4be2-aeac-a058518ecd06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.900742] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.037s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.912362] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508834, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.118568] env[69475]: DEBUG nova.network.neutron [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [{"id": "4059da75-efc8-42ee-90b1-8202220d1621", "address": "fa:16:3e:1e:8b:99", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4059da75-ef", "ovs_interfaceid": "4059da75-efc8-42ee-90b1-8202220d1621", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1091.170468] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508829, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.198812} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.170801] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3/OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3.vmdk to [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk. 
[ 1091.170994] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Cleaning up location [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1091.171175] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_60295392-277f-4bb5-842c-e2b626ff7dd3 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.171424] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aacfb478-0765-47f1-a3ff-773121a9eb11 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.177031] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1091.177031] env[69475]: value = "task-3508839" [ 1091.177031] env[69475]: _type = "Task" [ 1091.177031] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.184666] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508839, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.259753] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Successfully updated port: cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1091.264747] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508835, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.291974] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49cc696b-8044-4bbc-9371-5db26d9eedef tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "226afd68-34d8-482e-89f9-0c45a300a803" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.099s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.334122] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508838, 'name': CreateVM_Task, 'duration_secs': 0.375327} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.334308] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.335019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.335225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.335587] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1091.335853] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7af0e42f-4da9-42fd-b019-8814e000dc89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.340925] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1091.340925] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817217-5701-c04d-22fc-b645fe59140f" [ 1091.340925] env[69475]: _type = "Task" [ 1091.340925] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.348249] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817217-5701-c04d-22fc-b645fe59140f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.377435] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.377979] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.378156] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.379170] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c580efb-6c53-4a5a-bba3-98706ee0169d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.396407] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1091.396647] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1091.396831] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1091.397029] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1091.397181] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1091.397329] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1091.397531] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1091.397686] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1091.397853] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1091.398043] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1091.398229] env[69475]: DEBUG nova.virt.hardware [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1091.404638] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfiguring VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1091.405037] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-167300f9-97cd-46c6-9b10-c0988e2ad339 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.426855] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508834, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.428139] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1091.428139] env[69475]: value = "task-3508840" [ 1091.428139] env[69475]: _type = "Task" [ 1091.428139] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.439852] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508840, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.621059] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-baf27027-678d-4167-bb9b-df410aeb0e82" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.621377] env[69475]: DEBUG nova.objects.instance [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'migration_context' on Instance uuid baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.687293] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508839, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158573} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.687561] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1091.687729] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.687973] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk to [datastore1] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1091.688244] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3011d0bb-35d7-4493-94b8-53f8d76a5441 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.694631] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1091.694631] env[69475]: value = "task-3508841" [ 1091.694631] env[69475]: _type = "Task" [ 1091.694631] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.702865] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.766520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.766678] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.766832] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1091.768152] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508835, 'name': CreateVM_Task, 'duration_secs': 0.874684} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.768359] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1091.769364] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.778653] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f780166-e60e-40ff-9ceb-60b71cde84ec tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.779804] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.441s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.780766] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.780766] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1091.780766] env[69475]: 
DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.615s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.780766] env[69475]: DEBUG nova.objects.instance [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lazy-loading 'resources' on Instance uuid 97013703-3506-4441-b80c-cbb5c7e29bdf {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1091.782964] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09c4bf13-c12f-4c48-a139-6731305d3829 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.792750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff110d37-591d-42ab-9c7e-01d7d823f977 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.808596] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b85b7e-7a44-4c41-a016-d0613dccf2d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.816570] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e885cfc7-47e8-4403-83a4-8c5edde1321a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.850948] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179358MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1091.851157] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.861370] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817217-5701-c04d-22fc-b645fe59140f, 'name': SearchDatastore_Task, 'duration_secs': 0.053192} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.861691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1091.861995] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.862265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.862870] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.862870] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.863018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1091.863259] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1091.863490] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e04f0ef7-7b87-4d2e-b291-8cdb2cae608e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.865827] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-982a9b7c-6a6f-44d2-a60d-fbac76d94e5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.871608] env[69475]: DEBUG 
oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1091.871608] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e6be27-1c0b-e175-5d1e-ecb64bd1d4c1" [ 1091.871608] env[69475]: _type = "Task" [ 1091.871608] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.876176] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.876356] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.877369] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6054228f-2ed7-4369-8751-8774ba2c33bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.882787] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e6be27-1c0b-e175-5d1e-ecb64bd1d4c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.886012] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1091.886012] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c48a81-1f72-bedf-71c3-e4a90698839c" [ 1091.886012] env[69475]: _type = "Task" [ 1091.886012] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.893823] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c48a81-1f72-bedf-71c3-e4a90698839c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.914324] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508834, 'name': ReconfigVM_Task, 'duration_secs': 1.159338} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.914595] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/92020fc6-aff6-437f-9e26-a5b61ea7e76f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1091.915266] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d85ec00b-7971-4a0f-b712-d39b44c8c614 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.922327] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1091.922327] env[69475]: value = "task-3508842" [ 1091.922327] env[69475]: _type = "Task" [ 1091.922327] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.931322] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508842, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.936158] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.936398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.936627] env[69475]: INFO nova.compute.manager [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Attaching volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a to /dev/sdb [ 1091.942445] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508840, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.978039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4264bea-5d88-49c8-94ae-0fd166e142e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.986237] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d4f13a-c8f3-4bf0-8d4e-b9d081838d77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.000666] env[69475]: DEBUG nova.virt.block_device [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating existing volume attachment record: b7ff0c0f-a91d-4306-80d7-011dee9a249b {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1092.011187] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.011371] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing instance network info cache due to event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1092.011726] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.011768] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.011915] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.125072] env[69475]: DEBUG nova.objects.base [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1092.126454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838d2d33-39ec-40a7-96df-c4897af27e36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.149523] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-9a5f031a-e696-4e63-a976-de0c67bcd876 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.156035] env[69475]: DEBUG oslo_vmware.api [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1092.156035] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52612607-0444-0e8d-7759-946b76d40b1c" [ 1092.156035] env[69475]: _type = "Task" [ 1092.156035] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.167021] env[69475]: DEBUG oslo_vmware.api [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52612607-0444-0e8d-7759-946b76d40b1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.203855] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.305176] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1092.310156] env[69475]: DEBUG nova.compute.manager [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Received event network-changed-81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.310429] env[69475]: DEBUG nova.compute.manager [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Refreshing instance network info cache due to event network-changed-81121438-ec92-4519-97f1-e2a871109623. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1092.310738] env[69475]: DEBUG oslo_concurrency.lockutils [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.310958] env[69475]: DEBUG oslo_concurrency.lockutils [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.311070] env[69475]: DEBUG nova.network.neutron [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Refreshing network info cache for port 81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1092.385322] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e6be27-1c0b-e175-5d1e-ecb64bd1d4c1, 'name': SearchDatastore_Task, 'duration_secs': 0.018232} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.385928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.385928] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1092.386318] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1092.400690] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c48a81-1f72-bedf-71c3-e4a90698839c, 'name': SearchDatastore_Task, 'duration_secs': 0.008543} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.401537] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39b407ce-62ca-44c6-a931-ff30982a1a33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.409919] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1092.409919] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e813f1-fae3-7764-6ca5-5e5b046f0050" [ 1092.409919] env[69475]: _type = "Task" [ 1092.409919] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.417923] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e813f1-fae3-7764-6ca5-5e5b046f0050, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.430678] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508842, 'name': Rename_Task, 'duration_secs': 0.251127} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.430784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.430991] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f140f84-e946-4f07-a564-aa56b481b0b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.445086] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1092.445086] env[69475]: value = "task-3508844" [ 1092.445086] env[69475]: _type = "Task" [ 1092.445086] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.448586] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508840, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.462582] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508844, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.475265] env[69475]: DEBUG nova.network.neutron [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Updating instance_info_cache with network_info: [{"id": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "address": "fa:16:3e:5b:4e:7b", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfc6e6cb-79", "ovs_interfaceid": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.595453] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d4e2949-c6bb-4dd3-80cf-1e98ba0d2464 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.604979] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd5af61-33f5-4c9c-a372-cc1e250dbcf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.648135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0217977c-9283-4d3b-9551-8732a7a2da69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.657840] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf810b3-dd41-498e-94e5-78ced911fb1e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.675205] env[69475]: DEBUG oslo_vmware.api [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52612607-0444-0e8d-7759-946b76d40b1c, 'name': SearchDatastore_Task, 'duration_secs': 0.01288} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.684149] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.684913] env[69475]: DEBUG nova.compute.provider_tree [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.709399] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.817456] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updated VIF entry in instance network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1092.817872] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.926630] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': 
session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e813f1-fae3-7764-6ca5-5e5b046f0050, 'name': SearchDatastore_Task, 'duration_secs': 0.099271} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.926910] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.927193] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9/60516e16-bd7e-4fc1-b95f-603fb5ef6ae9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.927499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1092.927714] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1092.927924] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2011f069-aabb-4032-a81b-0e206277818c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.930096] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5eed2bd5-2f5c-474b-908c-067f636ed775 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.940618] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1092.940618] env[69475]: value = "task-3508847" [ 1092.940618] env[69475]: _type = "Task" [ 1092.940618] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.952084] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508840, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.952421] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1092.952598] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1092.956674] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-211aced5-a849-4d70-8d88-8cd1b0fdb223 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.962460] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.967691] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508844, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.969169] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1092.969169] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5285985d-b370-0d47-38d1-5e1531ba80af" [ 1092.969169] env[69475]: _type = "Task" [ 1092.969169] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.977069] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5285985d-b370-0d47-38d1-5e1531ba80af, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.980105] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1092.980419] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Instance network_info: |[{"id": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "address": "fa:16:3e:5b:4e:7b", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfc6e6cb-79", "ovs_interfaceid": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1092.980818] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:4e:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cfc6e6cb-798d-4b99-8764-5faf560ca662', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1092.988559] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.989230] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1092.989498] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6033d0b5-17b0-4525-8da6-8864dd11b480 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.012182] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1093.012182] env[69475]: value = "task-3508848" [ 1093.012182] env[69475]: _type = "Task" [ 1093.012182] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.024572] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508848, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.060672] env[69475]: DEBUG nova.network.neutron [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updated VIF entry in instance network info cache for port 81121438-ec92-4519-97f1-e2a871109623. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1093.061185] env[69475]: DEBUG nova.network.neutron [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1093.190800] env[69475]: DEBUG nova.scheduler.client.report [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.209834] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.282917] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.283200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.322871] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.323184] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Received event network-vif-deleted-316b2c71-6909-4d98-a09c-c3c58878a1ed {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.323386] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-plugged-9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.323576] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.323782] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.323974] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 
req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.324159] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] No waiting events found dispatching network-vif-plugged-9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1093.324392] env[69475]: WARNING nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received unexpected event network-vif-plugged-9d4929eb-bec9-43f8-9341-df239fb9a0a7 for instance with vm_state active and task_state None. [ 1093.324561] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-changed-9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1093.324717] env[69475]: DEBUG nova.compute.manager [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing instance network info cache due to event network-changed-9d4929eb-bec9-43f8-9341-df239fb9a0a7. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1093.324934] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1093.325085] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1093.325259] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Refreshing network info cache for port 9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1093.452900] env[69475]: DEBUG oslo_vmware.api [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508840, 'name': ReconfigVM_Task, 'duration_secs': 1.615125} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.457247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.457437] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfigured VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1093.464643] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.473290] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508844, 'name': PowerOnVM_Task} progress is 78%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.484909] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5285985d-b370-0d47-38d1-5e1531ba80af, 'name': SearchDatastore_Task, 'duration_secs': 0.088194} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.485752] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5bfe4c-bf84-4108-a688-dfdb35f95579 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.493290] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1093.493290] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5382b-0c6f-f4ee-d992-b829f130bd2c" [ 1093.493290] env[69475]: _type = "Task" [ 1093.493290] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.502930] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5382b-0c6f-f4ee-d992-b829f130bd2c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.524788] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508848, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.563896] env[69475]: DEBUG oslo_concurrency.lockutils [req-71e9ce7a-22b8-425c-bf96-2ea5ec5983f2 req-b1e7ea7a-d704-43f8-a92c-a70c3e2bd447 service nova] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1093.696339] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.916s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.698944] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.560s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.700553] env[69475]: INFO nova.compute.claims [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.715716] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.721589] env[69475]: INFO nova.scheduler.client.report [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Deleted allocations for instance 97013703-3506-4441-b80c-cbb5c7e29bdf [ 1093.786157] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1093.959884] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508847, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.969646] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1c93fafe-c0ea-4303-b15c-bc0a3486da12 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-9d4929eb-bec9-43f8-9341-df239fb9a0a7" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.094s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.970861] env[69475]: DEBUG oslo_vmware.api [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508844, 'name': PowerOnVM_Task, 'duration_secs': 1.351581} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.970861] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.971612] env[69475]: INFO nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Took 10.42 seconds to spawn the instance on the hypervisor. [ 1093.971721] env[69475]: DEBUG nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.972573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2982d5f7-3182-41ea-a92e-1ba9b8e133e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.006376] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d5382b-0c6f-f4ee-d992-b829f130bd2c, 'name': SearchDatastore_Task, 'duration_secs': 0.079994} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.006687] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.006956] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1094.007245] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fb33a95e-381e-40fe-91ec-71515089d005 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.014520] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1094.014520] env[69475]: value = "task-3508849" [ 1094.014520] env[69475]: _type = "Task" [ 1094.014520] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.027558] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508848, 'name': CreateVM_Task, 'duration_secs': 0.581206} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.030618] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1094.030930] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508849, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.031626] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.031796] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.032142] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1094.032719] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3aaa7eff-66a4-421e-9204-8dd415d6439e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.042316] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1094.042316] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525390de-1b09-d25f-562b-710a9e3bc432" [ 1094.042316] env[69475]: _type = "Task" [ 1094.042316] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.053347] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525390de-1b09-d25f-562b-710a9e3bc432, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.072141] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updated VIF entry in instance network info cache for port 9d4929eb-bec9-43f8-9341-df239fb9a0a7. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1094.072707] env[69475]: DEBUG nova.network.neutron [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "address": "fa:16:3e:1b:e3:de", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape60a34f2-99", "ovs_interfaceid": "e60a34f2-9926-41dc-a777-3d0e92f22ce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "address": "fa:16:3e:8b:2b:05", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4929eb-be", "ovs_interfaceid": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.220173] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.229795] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a489be0c-82bf-4365-950f-f191bd680785 tempest-ServersListShow2100Test-1581118720 tempest-ServersListShow2100Test-1581118720-project-member] Lock "97013703-3506-4441-b80c-cbb5c7e29bdf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.259s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.307143] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.416071] env[69475]: DEBUG nova.compute.manager [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Received event network-vif-plugged-cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.416360] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Acquiring lock "96533442-eb53-4bc2-bda3-71efc973d403-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.416519] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Lock "96533442-eb53-4bc2-bda3-71efc973d403-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.416739] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Lock "96533442-eb53-4bc2-bda3-71efc973d403-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.416942] env[69475]: DEBUG nova.compute.manager [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] No waiting 
events found dispatching network-vif-plugged-cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1094.417091] env[69475]: WARNING nova.compute.manager [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Received unexpected event network-vif-plugged-cfc6e6cb-798d-4b99-8764-5faf560ca662 for instance with vm_state building and task_state spawning. [ 1094.417254] env[69475]: DEBUG nova.compute.manager [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Received event network-changed-cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.417408] env[69475]: DEBUG nova.compute.manager [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Refreshing instance network info cache due to event network-changed-cfc6e6cb-798d-4b99-8764-5faf560ca662. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1094.417593] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Acquiring lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.417737] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Acquired lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.417891] env[69475]: DEBUG nova.network.neutron [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Refreshing network info cache for port cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1094.455597] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508847, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.492902] env[69475]: INFO nova.compute.manager [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Took 26.86 seconds to build instance. [ 1094.528605] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508849, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.552506] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525390de-1b09-d25f-562b-710a9e3bc432, 'name': SearchDatastore_Task, 'duration_secs': 0.0209} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.552822] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.553084] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1094.553330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1094.553482] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1094.553659] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1094.553943] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2b02ee2-a2aa-4fbc-9e86-550a0b9c7632 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.566739] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1094.567238] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1094.568281] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b96ee88-91a5-4d46-a9df-a0167827f590 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.576262] env[69475]: DEBUG oslo_concurrency.lockutils [req-f4e33ca3-8c00-49ef-9b70-d9d3c813f367 req-81b02937-4eda-4fd5-a15e-d413468c9ab5 service nova] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1094.579324] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1094.579324] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522eef13-842d-80c5-c1b2-32df237beeaa" [ 1094.579324] env[69475]: _type = "Task" [ 1094.579324] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.588313] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522eef13-842d-80c5-c1b2-32df237beeaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.726167] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508841, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.61707} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.726499] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a9351d13-720c-49e6-a8e9-3fac7da2b98a/a9351d13-720c-49e6-a8e9-3fac7da2b98a.vmdk to [datastore1] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.727376] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da4be26-5d1a-4d81-8a14-3b3b1b2e2b48 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.759456] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1094.762206] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0330eab-f078-4c72-b904-ddd3af9460d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.785556] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1094.785556] env[69475]: value = "task-3508851" [ 1094.785556] env[69475]: _type = "Task" [ 1094.785556] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.799264] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508851, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.959086] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508847, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.006743} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.961676] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9/60516e16-bd7e-4fc1-b95f-603fb5ef6ae9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1094.961813] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1094.962252] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cbf84cc6-c47d-4429-a9b4-298961c1c8bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.971446] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1094.971446] env[69475]: value = "task-3508852" [ 1094.971446] env[69475]: _type = "Task" [ 1094.971446] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.979550] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508852, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.995678] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45b32dad-f539-451a-8814-4a39d75119a2 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.374s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.024066] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f9c4dd-65fe-435e-ae78-5d06c257512f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.035553] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508849, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.038612] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e64cca36-c566-44f0-9aba-5c94a27d1b32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.081523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3efbce-09c7-452d-9189-84b923689075 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.095776] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522eef13-842d-80c5-c1b2-32df237beeaa, 'name': SearchDatastore_Task, 'duration_secs': 0.03242} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.097428] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1498ac66-83b9-4426-b156-02cb919e388c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.102727] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5973edaa-76c2-4e12-add6-3a855cb457db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.117414] env[69475]: DEBUG nova.compute.provider_tree [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1095.121523] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1095.121523] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b95ea9-eb0a-dfa7-8094-c84aa81d5fb6" [ 1095.121523] env[69475]: _type = "Task" [ 1095.121523] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.137337] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b95ea9-eb0a-dfa7-8094-c84aa81d5fb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.205650] env[69475]: DEBUG nova.network.neutron [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Updated VIF entry in instance network info cache for port cfc6e6cb-798d-4b99-8764-5faf560ca662. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.206166] env[69475]: DEBUG nova.network.neutron [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Updating instance_info_cache with network_info: [{"id": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "address": "fa:16:3e:5b:4e:7b", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcfc6e6cb-79", "ovs_interfaceid": "cfc6e6cb-798d-4b99-8764-5faf560ca662", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.297516] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508851, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.458175] env[69475]: DEBUG nova.compute.manager [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.458175] env[69475]: DEBUG nova.compute.manager [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing instance network info cache due to event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1095.458175] env[69475]: DEBUG oslo_concurrency.lockutils [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.458175] env[69475]: DEBUG oslo_concurrency.lockutils [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.458175] env[69475]: DEBUG nova.network.neutron [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.481788] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508852, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070561} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.482012] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1095.482843] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ad85c25-8386-4f1c-9f85-58e06f3a5629 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.505211] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9/60516e16-bd7e-4fc1-b95f-603fb5ef6ae9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1095.505843] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d2fd878-87c1-4fc3-98cd-75ae9b7c6146 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.529849] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508849, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.41099} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.531168] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1095.531435] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1095.531789] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1095.531789] env[69475]: value = "task-3508853" [ 1095.531789] env[69475]: _type = "Task" [ 1095.531789] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.531987] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ef30fd86-852d-4fb7-887c-929112c9c8c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.542187] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508853, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.543336] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1095.543336] env[69475]: value = "task-3508854" [ 1095.543336] env[69475]: _type = "Task" [ 1095.543336] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.552146] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508854, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.623738] env[69475]: DEBUG nova.scheduler.client.report [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.639249] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52b95ea9-eb0a-dfa7-8094-c84aa81d5fb6, 'name': SearchDatastore_Task, 'duration_secs': 0.026482} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.639559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.639903] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 96533442-eb53-4bc2-bda3-71efc973d403/96533442-eb53-4bc2-bda3-71efc973d403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1095.640331] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29e02ca4-fbdd-4715-a52f-c54cfbd5137c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.649118] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1095.649118] env[69475]: value = "task-3508855" [ 1095.649118] env[69475]: _type = "Task" [ 1095.649118] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.659655] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508855, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.711122] env[69475]: DEBUG oslo_concurrency.lockutils [req-f3d41872-a8ff-463d-9e2c-3543b207d9cd req-d093f71e-ee5c-4abb-8bec-bd60246754fe service nova] Releasing lock "refresh_cache-96533442-eb53-4bc2-bda3-71efc973d403" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1095.711480] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.711712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.799336] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508851, 'name': ReconfigVM_Task, 'duration_secs': 0.948624} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.799534] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 8f18d683-7734-4798-8963-7336fe229f16/8f18d683-7734-4798-8963-7336fe229f16.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1095.800262] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-107cb32d-ee8b-471c-be30-71f99908a713 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.806300] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1095.806300] env[69475]: value = "task-3508856" [ 1095.806300] env[69475]: _type = "Task" [ 1095.806300] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.814854] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508856, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.047092] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508853, 'name': ReconfigVM_Task, 'duration_secs': 0.336528} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.050481] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9/60516e16-bd7e-4fc1-b95f-603fb5ef6ae9.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.051223] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c2a1192-4a81-4bcf-9dc9-b5942b737653 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.058946] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508854, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.122966} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.060302] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1096.060668] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1096.060668] env[69475]: value = "task-3508857" [ 1096.060668] env[69475]: _type = "Task" [ 1096.060668] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.061381] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2a424e-93a1-4656-a584-9bb87fe2e71a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.064165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-e60a34f2-9926-41dc-a777-3d0e92f22ce9" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.064424] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-e60a34f2-9926-41dc-a777-3d0e92f22ce9" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.090023] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1096.097337] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54bf0993-6dbc-423e-8c73-c529f84f9a01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.113151] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508857, 'name': Rename_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.120536] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1096.120536] env[69475]: value = "task-3508858" [ 1096.120536] env[69475]: _type = "Task" [ 1096.120536] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.130680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.432s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.131248] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1096.133892] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508858, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.134193] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.648s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.134448] env[69475]: DEBUG nova.objects.instance [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'resources' on Instance uuid 20b37e69-5870-4f63-aeba-9293615da478 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1096.165589] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508855, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.214357] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1096.259437] env[69475]: DEBUG nova.network.neutron [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updated VIF entry in instance network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1096.259881] env[69475]: DEBUG nova.network.neutron [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1096.316108] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508856, 'name': Rename_Task, 'duration_secs': 0.195347} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.316389] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.316644] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b285e1ee-ee3c-4776-a8cc-b07a56c1fa83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.322788] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1096.322788] env[69475]: value = "task-3508859" [ 1096.322788] env[69475]: _type = "Task" [ 1096.322788] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.331143] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508859, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.570243] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.570484] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1096.576442] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d73d72-ade8-406b-b058-57baf088100f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.580581] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508857, 'name': Rename_Task, 'duration_secs': 0.176273} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.580975] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1096.581841] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b028afe4-eae4-4f60-99c8-ae65fa70bf2f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.601483] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe7dad3-8e07-4f99-9588-7832039d62f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.606071] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1096.606071] env[69475]: value = "task-3508860" [ 1096.606071] env[69475]: _type = "Task" [ 1096.606071] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.634120] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfiguring VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1096.638784] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46958b2b-16e9-4ea4-80ff-3aefcd8e7206 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.658531] env[69475]: DEBUG nova.compute.utils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1096.663486] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508860, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.664074] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1096.672971] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508858, 'name': ReconfigVM_Task, 'duration_secs': 0.514737} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.675490] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1096.676259] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1096.676259] env[69475]: value = "task-3508861" [ 1096.676259] env[69475]: _type = "Task" [ 1096.676259] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.676819] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ad2cddb7-b79f-4bef-a3ca-dabbbe1d47d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.687403] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508855, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63428} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.690558] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 96533442-eb53-4bc2-bda3-71efc973d403/96533442-eb53-4bc2-bda3-71efc973d403.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1096.690783] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1096.691592] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b0a5dfec-168f-4ccd-9fd1-d8e2cd3b7826 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.698354] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1096.698354] env[69475]: value = "task-3508862" [ 1096.698354] env[69475]: _type = "Task" [ 1096.698354] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.698598] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.706827] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1096.706827] env[69475]: value = "task-3508863" [ 1096.706827] env[69475]: _type = "Task" [ 1096.706827] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.714244] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508862, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.719515] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508863, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.740182] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.765700] env[69475]: DEBUG oslo_concurrency.lockutils [req-d24f821e-5189-4c92-9c5a-b0a9027911ed req-87410941-2644-4a82-803a-d2618a2b2d05 service nova] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.837943] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508859, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.941611] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b331f0-1750-4ee2-8953-83b18c880406 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.948831] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ea3630-bc20-484f-805c-8c63585973e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.983451] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58a3a65f-891c-4c1f-b6b7-ee8ba8350ddc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.991311] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c44ae31-2536-4f8a-8ed2-fd19a9198181 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.007059] env[69475]: DEBUG nova.compute.provider_tree [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1097.051668] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1097.051958] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1097.052927] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cf26c0-77ce-47bf-a9ba-b72ac6b8e279 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.071211] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fffc215-8953-40be-be6b-0a495ee659bf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.096804] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.097075] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7d7c5ec-78be-4286-ad4e-4188fbb5fffe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.119236] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508860, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.120518] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1097.120518] env[69475]: value = "task-3508864" [ 1097.120518] env[69475]: _type = "Task" [ 1097.120518] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.128266] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508864, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.162233] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1097.187809] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.208215] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508862, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.215160] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508863, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.157795} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.215463] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1097.216216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76755068-2b73-4f37-a3ca-00b374d04a2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.238149] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 96533442-eb53-4bc2-bda3-71efc973d403/96533442-eb53-4bc2-bda3-71efc973d403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.238397] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11a10368-f72b-4fad-906c-bd8848ab9c9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.256959] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1097.256959] env[69475]: value = "task-3508865" [ 1097.256959] env[69475]: _type = "Task" [ 1097.256959] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.265187] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508865, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.333278] env[69475]: DEBUG oslo_vmware.api [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508859, 'name': PowerOnVM_Task, 'duration_secs': 0.543932} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.333583] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.435941] env[69475]: DEBUG nova.compute.manager [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.436974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d9728d-97b6-4dc1-b620-4579ebcc2a3d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.510046] env[69475]: DEBUG nova.scheduler.client.report [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.621092] env[69475]: DEBUG oslo_vmware.api [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508860, 'name': PowerOnVM_Task, 'duration_secs': 0.563847} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.621448] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1097.621751] env[69475]: INFO nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Took 11.61 seconds to spawn the instance on the hypervisor. [ 1097.622027] env[69475]: DEBUG nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1097.626127] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bfbe3bd-fe16-4234-874f-a4edf1645ace {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.635908] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.689015] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.710143] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508862, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.769392] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508865, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.954160] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5ac29ef4-640a-45f2-afc8-14456440970d tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 31.867s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.015514] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.018023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 6.167s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.039564] env[69475]: INFO nova.scheduler.client.report [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted allocations for instance 20b37e69-5870-4f63-aeba-9293615da478 [ 1098.131264] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508864, 'name': ReconfigVM_Task, 'duration_secs': 0.813636} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.131583] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.136827] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2c6c400-82ce-4413-8205-494e439c5056 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.152892] env[69475]: INFO nova.compute.manager [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Took 30.03 seconds to build instance. [ 1098.155917] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1098.155917] env[69475]: value = "task-3508866" [ 1098.155917] env[69475]: _type = "Task" [ 1098.155917] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.164211] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508866, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.172453] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1098.190880] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.199773] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1098.199964] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1098.200138] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1098.200328] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1098.200474] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1098.200620] env[69475]: DEBUG nova.virt.hardware [None 
req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1098.200906] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1098.201109] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1098.201291] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1098.201457] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1098.201629] env[69475]: DEBUG nova.virt.hardware [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1098.202801] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de2ba65-9932-4b25-aeb9-50d5c8dad14a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.215818] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a7b324-ebed-42dc-a1d6-b0c25d5a0a92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.219413] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508862, 'name': Rename_Task, 'duration_secs': 1.110553} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.219675] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.220183] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-382de5b9-bd50-4edb-9c27-7b4f20148d80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.229615] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.235105] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Creating folder: Project (b9a2b986ee8f42558444e57e7fec7a38). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1098.236481] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-282a488e-5f40-487b-9349-109023ed5426 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.237939] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1098.237939] env[69475]: value = "task-3508867" [ 1098.237939] env[69475]: _type = "Task" [ 1098.237939] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.245821] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.246951] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Created folder: Project (b9a2b986ee8f42558444e57e7fec7a38) in parent group-v700823. [ 1098.247146] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Creating folder: Instances. Parent ref: group-v701119. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1098.247359] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5c5be3c-4ff5-412a-85e7-30eee01b32d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.256420] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Created folder: Instances in parent group-v701119. [ 1098.256683] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.256935] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.257166] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d8169065-b291-475f-af6b-e5be297c648c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.280300] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508865, 'name': ReconfigVM_Task, 'duration_secs': 0.64145} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.281730] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 96533442-eb53-4bc2-bda3-71efc973d403/96533442-eb53-4bc2-bda3-71efc973d403.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1098.282358] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.282358] env[69475]: value = "task-3508870" [ 1098.282358] env[69475]: _type = "Task" [ 1098.282358] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.282568] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-53d3e822-8479-4ffd-a5c1-bc95615bead4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.292974] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508870, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.293999] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1098.293999] env[69475]: value = "task-3508871" [ 1098.293999] env[69475]: _type = "Task" [ 1098.293999] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.302220] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508871, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.546913] env[69475]: DEBUG oslo_concurrency.lockutils [None req-273587f4-4305-4902-8175-ed495528776d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "20b37e69-5870-4f63-aeba-9293615da478" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.931s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.655281] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8648bbd5-9b2c-46d6-9568-3731dc6bf148 tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.544s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.665866] env[69475]: DEBUG oslo_vmware.api [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508866, 'name': ReconfigVM_Task, 'duration_secs': 0.203175} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.666175] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1098.690526] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.748878] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.794228] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508870, 'name': CreateVM_Task, 'duration_secs': 0.490671} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.794500] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1098.797622] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.797784] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.798107] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1098.798817] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-147c5d67-919f-47d1-9d8a-ba3897dee1af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.804857] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508871, 'name': Rename_Task, 'duration_secs': 0.176426} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.806037] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1098.806355] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1098.806355] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aa08e-1be4-ebb8-b96a-ae0dc75d93c8" [ 1098.806355] env[69475]: _type = "Task" [ 1098.806355] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.806532] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e0b7b17-5cd2-4dd7-b393-8ad5a54701e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.815922] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aa08e-1be4-ebb8-b96a-ae0dc75d93c8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.817037] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1098.817037] env[69475]: value = "task-3508872" [ 1098.817037] env[69475]: _type = "Task" [ 1098.817037] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.824121] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.031116] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Applying migration context for instance baf27027-678d-4167-bb9b-df410aeb0e82 as it has an incoming, in-progress migration 4fac22fe-611e-4785-b07c-06b01264b8f7. Migration status is confirming {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1099.033256] env[69475]: INFO nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating resource usage from migration 4fac22fe-611e-4785-b07c-06b01264b8f7 [ 1099.053280] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.053513] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4066a18f-acc5-49b5-941c-0711f29bdcd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.053686] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 41ddf915-343b-46e4-834e-11ab3899242f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.053848] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4100fb43-1dae-40b1-8caa-11dd67962274 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.054013] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f8a82046-4589-45d2-a7a3-466fe4d8f9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.054176] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.054374] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4f091501-351c-45b8-9f64-4d28d4623df8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.054557] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8f18d683-7734-4798-8963-7336fe229f16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.054725] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Migration 4fac22fe-611e-4785-b07c-06b01264b8f7 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1099.054878] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance baf27027-678d-4167-bb9b-df410aeb0e82 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055073] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e10a197a-a9b7-43ce-b8a8-ce186619feb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055240] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 92020fc6-aff6-437f-9e26-a5b61ea7e76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055386] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055538] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 460d4b93-b18a-4965-9e2b-8c6175ccc91f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055686] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 96533442-eb53-4bc2-bda3-71efc973d403 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.055835] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1099.192887] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.248675] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.280184] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995c751a-cb2d-4412-9ce6-c39a0fbc214b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.286505] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Suspending the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1099.286737] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ddac5476-d683-4189-a4e6-61c8d68ac036 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.292924] env[69475]: DEBUG oslo_vmware.api [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1099.292924] env[69475]: value = "task-3508873" [ 1099.292924] env[69475]: _type = "Task" [ 1099.292924] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.300547] env[69475]: DEBUG oslo_vmware.api [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508873, 'name': SuspendVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.317142] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523aa08e-1be4-ebb8-b96a-ae0dc75d93c8, 'name': SearchDatastore_Task, 'duration_secs': 0.015488} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.317319] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.317558] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.317796] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.317940] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.318135] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.318441] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaea166a-e12e-4f31-827d-2c3f84245e56 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.328373] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508872, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.329426] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.329598] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.330300] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fa0943c-5997-401e-8da3-1023a365e3d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.332593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.332808] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.333014] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.333202] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1099.333369] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1099.336172] env[69475]: INFO nova.compute.manager [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Terminating instance [ 1099.339084] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1099.339084] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b8fab-b271-7740-d5b0-eddd95d9288e" [ 1099.339084] env[69475]: _type = "Task" [ 1099.339084] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.348680] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b8fab-b271-7740-d5b0-eddd95d9288e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.559543] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1099.691957] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.703534] env[69475]: DEBUG nova.objects.instance [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'flavor' on Instance uuid 4f091501-351c-45b8-9f64-4d28d4623df8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.749329] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.804062] env[69475]: DEBUG oslo_vmware.api [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508873, 'name': SuspendVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.829932] env[69475]: DEBUG oslo_vmware.api [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508872, 'name': PowerOnVM_Task, 'duration_secs': 1.011859} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.830554] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1099.830663] env[69475]: INFO nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Took 9.00 seconds to spawn the instance on the hypervisor. 
[ 1099.830870] env[69475]: DEBUG nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1099.831833] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-384c2a39-1efc-42a7-8c35-f0f2a3442ad9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.847543] env[69475]: DEBUG nova.compute.manager [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1099.847777] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1099.848783] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a161c484-9f48-469a-8f48-aa08723ff680 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.856940] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b8fab-b271-7740-d5b0-eddd95d9288e, 'name': SearchDatastore_Task, 'duration_secs': 0.011274} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.859669] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1099.859909] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d288fc06-976d-4e8f-ae4a-fec780063350 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.862540] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8c2fb3a-fd6f-465f-b284-19f9ba2e850d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.866604] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1099.866604] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439d7c-8164-0c8e-8891-ba2ce20251c0" [ 1099.866604] env[69475]: _type = "Task" [ 1099.866604] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.871242] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1099.871242] env[69475]: value = "task-3508874" [ 1099.871242] env[69475]: _type = "Task" [ 1099.871242] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.878069] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439d7c-8164-0c8e-8891-ba2ce20251c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.883329] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.957399] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1099.957645] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.062871] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 55d3513b-e0ad-49a7-bd26-147b1b2632cb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1100.063216] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1100.063319] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3648MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1100.191680] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.208770] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84d51bb6-afa1-4b6d-b026-e4c75b5838d5 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.272s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.249165] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.293972] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb6a74fb-6ef7-44cc-83eb-903496361fed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.307447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31bd133a-1533-479c-a407-71a9e8c7cc2d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.310538] env[69475]: DEBUG oslo_vmware.api [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508873, 'name': SuspendVM_Task, 'duration_secs': 0.66027} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.310784] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Suspended the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1100.310960] env[69475]: DEBUG nova.compute.manager [None req-dfc36c28-9397-4f0c-84e0-730d8658e0c5 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.311931] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fdcb6d3-4bf4-4dcb-a6b6-bc228d5a70fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.340278] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43b878b7-8807-4af8-a738-b9d931752826 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.354127] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae1eda11-0cc2-49a0-adf1-9b149fdf4e04 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.360223] env[69475]: INFO nova.compute.manager [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Took 30.11 seconds to build instance. [ 1100.371339] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.384312] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52439d7c-8164-0c8e-8891-ba2ce20251c0, 'name': SearchDatastore_Task, 'duration_secs': 0.010227} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.387208] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.387468] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.387983] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508874, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.388195] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-75c4c1dd-b004-4092-80c4-d801d6fe0ce6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.394613] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1100.394613] env[69475]: value = "task-3508875" [ 1100.394613] env[69475]: _type = "Task" [ 1100.394613] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.402428] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508875, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.460737] env[69475]: INFO nova.compute.manager [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Detaching volume 32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288 [ 1100.491497] env[69475]: INFO nova.virt.block_device [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Attempting to driver detach volume 32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288 from mountpoint /dev/sdb [ 1100.492031] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1100.492031] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701080', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'name': 'volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '41ddf915-343b-46e4-834e-11ab3899242f', 'attached_at': '', 'detached_at': '', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'serial': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1100.492750] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae99bdc0-0ab5-4516-930a-53a78112dd88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.515087] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa5aedc-0ac8-4929-a247-aa1eed8b61e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.522039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36bb715-4a85-4cfd-8b55-13602889be61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.542605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9ae0c0-692d-467c-878e-45a647e73f59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.560073] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] The volume has not been displaced from its original location: [datastore1] volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288/volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1100.564867] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfiguring VM instance instance-00000056 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1100.565233] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91068d54-708d-4bb3-81dd-8b0ba75ce75a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.583255] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1100.583255] env[69475]: value = "task-3508876" [ 1100.583255] env[69475]: _type = "Task" [ 1100.583255] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.591823] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508876, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.693507] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.752876] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.799042] env[69475]: INFO nova.compute.manager [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Rebuilding instance [ 1100.844076] env[69475]: DEBUG nova.compute.manager [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1100.845112] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2b0856-2b9b-4916-9bf9-6e1798ece1b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.862250] env[69475]: DEBUG oslo_concurrency.lockutils [None req-49c358a4-22d0-498e-ac2b-b1a2abafeb23 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.622s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.876918] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1100.893023] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508874, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.906159] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508875, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.093709] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508876, 'name': ReconfigVM_Task, 'duration_secs': 0.378471} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.093890] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Reconfigured VM instance instance-00000056 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1101.098641] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f7f38ab-df6a-4cae-9551-e57e733c5a9e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.114940] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1101.114940] env[69475]: value = "task-3508877" [ 1101.114940] env[69475]: _type = "Task" [ 1101.114940] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.123070] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508877, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.192765] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.250996] env[69475]: DEBUG oslo_vmware.api [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3508867, 'name': PowerOnVM_Task, 'duration_secs': 2.777052} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.251383] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1101.251675] env[69475]: INFO nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Took 12.84 seconds to spawn the instance on the hypervisor. 
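Annotation: the repeated "Task: {'id': task-..., 'name': ...} progress is N%." and "completed successfully" records above come from oslo.vmware's task polling loop (wait_for_task/_poll_task), which re-reads the vCenter task object on a fixed interval until it reaches a terminal state. Below is a minimal self-contained sketch of that polling behaviour, not Nova's or oslo.vmware's actual code; the simulated progress values, the task name, and the 0.5 s interval are assumptions for illustration only.

import time


def simulated_task_progress():
    # Stand-in for the PropertyCollector reads oslo.vmware performs against
    # vCenter; yields the progress value a real poll would return.
    for progress in (0, 25, 66, 100):
        yield progress


def wait_for_task(task_id, poll_interval=0.5):
    # Poll until the task reaches a terminal state, mirroring the
    # "progress is N%" / "completed successfully" records in the log above.
    for progress in simulated_task_progress():
        print(f"Task: {{'id': '{task_id}', 'name': 'CopyVirtualDisk_Task'}} "
              f"progress is {progress}%.")
        if progress >= 100:
            print(f"Task: {{'id': '{task_id}'}} completed successfully.")
            return
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task("task-3508875")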
[ 1101.251921] env[69475]: DEBUG nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1101.252889] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f44c8d63-0cf7-4bef-b61a-09c6f02caeb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.387658] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1101.387775] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.370s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.387994] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508874, 'name': PowerOffVM_Task, 'duration_secs': 1.187536} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.388586] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 8.704s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.390045] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1101.390045] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1101.390244] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2c3fedb5-3195-4c90-89c9-27ed284cc50a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.406254] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508875, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.701483} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.406514] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.406753] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.407016] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d85f203-417f-48da-8855-cc19d4647f4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.413205] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1101.413205] env[69475]: value = "task-3508879" [ 1101.413205] env[69475]: _type = "Task" [ 1101.413205] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.421496] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508879, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.459048] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1101.459339] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1101.459508] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Deleting the datastore file [datastore1] 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1101.459802] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aeab3b2d-30aa-49b2-8f28-d49581edc1a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.466358] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for the task: (returnval){ [ 1101.466358] env[69475]: value = "task-3508880" [ 1101.466358] env[69475]: _type = "Task" [ 1101.466358] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.475611] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508880, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.524323] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "cc85e976-78cf-4289-9674-d697630e7775" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.524613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.625659] env[69475]: DEBUG oslo_vmware.api [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508877, 'name': ReconfigVM_Task, 'duration_secs': 0.230086} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.626101] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701080', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'name': 'volume-32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '41ddf915-343b-46e4-834e-11ab3899242f', 'attached_at': '', 'detached_at': '', 'volume_id': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288', 'serial': '32e22c9b-f6b7-491f-ba3e-cd0e3fbbc288'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1101.694485] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.771811] env[69475]: INFO nova.compute.manager [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Took 31.86 seconds to build instance. 
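Annotation: the 'Acquiring lock "..."' / 'acquired ... waited N s' / '"released" ... held N s' records above are emitted by oslo.concurrency's lockutils when Nova serialises per-instance work (build, detach, resource tracking) behind a named lock. A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the lock name is taken from the log, but the function and the work inside the block are illustrative, not Nova's actual code.

import time

from oslo_concurrency import lockutils

# Instance UUID seen in the log records above, used here only as a lock name.
INSTANCE_UUID = "cc85e976-78cf-4289-9674-d697630e7775"


def build_and_run_instance():
    # lockutils.lock() is a context manager; entering and leaving it is what
    # produces the "acquired ... waited" / "released ... held" DEBUG lines.
    with lockutils.lock(INSTANCE_UUID):
        time.sleep(0.1)  # placeholder for the actual per-instance work


if __name__ == "__main__":
    build_and_run_instance()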
[ 1101.789028] env[69475]: INFO nova.compute.manager [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Resuming [ 1101.789028] env[69475]: DEBUG nova.objects.instance [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'flavor' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1101.864060] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1101.864445] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad3d9530-e3a0-4783-8f50-de198033738b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.871647] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1101.871647] env[69475]: value = "task-3508881" [ 1101.871647] env[69475]: _type = "Task" [ 1101.871647] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.880111] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.925039] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508879, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224288} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.925039] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.926593] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50480c5b-ea4a-4957-9e8c-bdaee4f6199d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.947709] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.951263] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a1025c9a-6a46-494a-b1ae-5494afa9f548 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.018199] env[69475]: DEBUG oslo_vmware.api [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Task: {'id': task-3508880, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.481258} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.018199] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1102.018199] env[69475]: value = "task-3508882" [ 1102.018199] env[69475]: _type = "Task" [ 1102.018199] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.018199] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1102.018613] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1102.018613] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1102.018613] env[69475]: INFO nova.compute.manager [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Took 2.17 seconds to destroy the instance on the hypervisor. [ 1102.022028] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1102.022028] env[69475]: DEBUG nova.compute.manager [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1102.022028] env[69475]: DEBUG nova.network.neutron [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1102.026690] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1102.034782] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508882, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.196181] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.197846] env[69475]: DEBUG nova.objects.instance [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid 41ddf915-343b-46e4-834e-11ab3899242f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1102.260158] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4f624a-241b-41eb-8a99-9a99b94ad231 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.269130] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eb410a5-3d5d-4a19-9a63-dda85dcca2f9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.274699] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3be5f8a0-f0a8-4ea2-b551-5bfb58186950 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.367s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.303064] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-898cbc8d-e1a4-4d4d-9d11-3ae460b1f35d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.312905] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1165d5bd-e014-43c4-b546-de9b20d8c017 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.329498] env[69475]: DEBUG nova.compute.provider_tree [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1102.381751] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508881, 'name': PowerOffVM_Task, 'duration_secs': 0.375975} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.382100] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1102.420442] env[69475]: DEBUG nova.compute.manager [req-b01cbb94-9776-4d18-87a1-44f5071957c0 req-51a5a780-7ff1-4f2e-b0c0-836218ff886c service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Received event network-vif-deleted-25bf5361-8992-425f-8d46-f45064536466 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.420711] env[69475]: INFO nova.compute.manager [req-b01cbb94-9776-4d18-87a1-44f5071957c0 req-51a5a780-7ff1-4f2e-b0c0-836218ff886c service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Neutron deleted interface 25bf5361-8992-425f-8d46-f45064536466; detaching it from the instance and deleting it from the info cache [ 1102.420903] env[69475]: DEBUG nova.network.neutron [req-b01cbb94-9776-4d18-87a1-44f5071957c0 req-51a5a780-7ff1-4f2e-b0c0-836218ff886c service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.461818] env[69475]: INFO nova.compute.manager [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Detaching volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a [ 1102.513090] env[69475]: INFO nova.virt.block_device [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Attempting to driver detach volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a from mountpoint /dev/sdb [ 1102.513346] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1102.513571] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1102.514552] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131caf46-e168-424f-bc21-f36d4577aa7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.543164] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3132fd66-fb7c-44b9-ac5a-238c41ac01ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.554829] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508882, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.558789] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c51af5-5994-40d3-9e38-429bde2ad757 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.562727] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1102.583306] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8217c33-21b4-47ac-b5eb-03ecd23b7e60 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.599753] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] The volume has not been displaced from its original location: [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1102.605963] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1102.606389] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ec4c5a3-59fe-41b7-9d73-b126b3e7da93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.625181] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1102.625181] env[69475]: value = "task-3508883" [ 1102.625181] env[69475]: _type = "Task" [ 1102.625181] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.634420] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508883, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.696192] env[69475]: DEBUG oslo_vmware.api [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508861, 'name': ReconfigVM_Task, 'duration_secs': 5.848489} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.696462] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1102.696705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Reconfigured VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1102.849979] env[69475]: DEBUG nova.network.neutron [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1102.857747] env[69475]: ERROR nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [req-4ad7aa84-0c28-4afb-bb8b-14846ed7f70d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4ad7aa84-0c28-4afb-bb8b-14846ed7f70d"}]} [ 1102.876279] env[69475]: DEBUG nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1102.926046] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6196223-7818-411a-88b6-d7372c9a6b29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.935032] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c3b01d-fb96-44ed-a532-737667933113 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.976024] env[69475]: DEBUG nova.compute.manager [req-b01cbb94-9776-4d18-87a1-44f5071957c0 req-51a5a780-7ff1-4f2e-b0c0-836218ff886c service nova] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Detach interface failed, port_id=25bf5361-8992-425f-8d46-f45064536466, reason: Instance 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1102.982855] env[69475]: DEBUG nova.compute.manager [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Received event network-changed-81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1102.983061] env[69475]: DEBUG nova.compute.manager [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Refreshing instance network info cache due to event network-changed-81121438-ec92-4519-97f1-e2a871109623. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1102.983280] env[69475]: DEBUG oslo_concurrency.lockutils [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1102.983422] env[69475]: DEBUG oslo_concurrency.lockutils [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1102.983580] env[69475]: DEBUG nova.network.neutron [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Refreshing network info cache for port 81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1103.028424] env[69475]: DEBUG nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1103.028619] env[69475]: DEBUG nova.compute.provider_tree [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.037028] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 
tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508882, 'name': ReconfigVM_Task, 'duration_secs': 0.724255} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.037869] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1103.038533] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23f8cf74-d073-4224-88ea-b0da307f429d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.043154] env[69475]: DEBUG nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1103.047611] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1103.047611] env[69475]: value = "task-3508884" [ 1103.047611] env[69475]: _type = "Task" [ 1103.047611] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.058200] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508884, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.070021] env[69475]: DEBUG nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1103.137978] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508883, 'name': ReconfigVM_Task, 'duration_secs': 0.362645} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.138479] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1103.145562] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f97ca86-a743-48bb-9af0-82e2c75304e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.160203] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1103.160203] env[69475]: value = "task-3508885" [ 1103.160203] env[69475]: _type = "Task" [ 1103.160203] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.170709] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508885, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.206015] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cebc7576-5fd1-4e79-a626-712ff0ac4c8d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.248s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1103.316084] env[69475]: DEBUG oslo_concurrency.lockutils [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.316282] env[69475]: DEBUG oslo_concurrency.lockutils [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquired lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.316458] env[69475]: DEBUG nova.network.neutron [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1103.335258] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3830ec9-07c0-4582-becf-05573142b6e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.344320] env[69475]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46b894e-c525-44aa-894f-3114a7c362a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.382030] env[69475]: INFO nova.compute.manager [-] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Took 1.36 seconds to deallocate network for instance. [ 1103.385762] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-464d10b3-2591-4b56-9a96-815f8fd27a63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.399477] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68c904e8-8c57-4016-afcf-3bedecd70cb3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.420651] env[69475]: DEBUG nova.compute.provider_tree [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1103.558933] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508884, 'name': Rename_Task, 'duration_secs': 0.434352} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.560211] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1103.560709] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f2630f3-2956-45ec-b869-818c190df9b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.566779] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1103.566779] env[69475]: value = "task-3508886" [ 1103.566779] env[69475]: _type = "Task" [ 1103.566779] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.574535] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508886, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.676637] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508885, 'name': ReconfigVM_Task, 'duration_secs': 0.348534} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.676637] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1103.726354] env[69475]: DEBUG nova.network.neutron [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updated VIF entry in instance network info cache for port 81121438-ec92-4519-97f1-e2a871109623. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1103.726354] env[69475]: DEBUG nova.network.neutron [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1103.895926] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 
tempest-ServerAddressesTestJSON-1752713486-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.958695] env[69475]: DEBUG nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 152 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1103.958695] env[69475]: DEBUG nova.compute.provider_tree [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 152 to 153 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1103.958695] env[69475]: DEBUG nova.compute.provider_tree [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1104.038605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.038605] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.038605] env[69475]: DEBUG nova.network.neutron [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1104.064881] env[69475]: DEBUG nova.network.neutron [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 
tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [{"id": "0241fad0-a699-4ab6-8665-37a808867cd9", "address": "fa:16:3e:9e:27:a0", "network": {"id": "77196001-28c0-48c7-924d-a11c93289ae5", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-829999270-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d25a22195d0c4370a481a242a18f430a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0549820d-5649-40bc-ad6e-9ae27b384d90", "external-id": "nsx-vlan-transportzone-434", "segmentation_id": 434, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0241fad0-a6", "ovs_interfaceid": "0241fad0-a699-4ab6-8665-37a808867cd9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.080041] env[69475]: DEBUG oslo_vmware.api [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508886, 'name': PowerOnVM_Task, 'duration_secs': 0.49518} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.080910] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1104.081147] env[69475]: INFO nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Took 5.91 seconds to spawn the instance on the hypervisor. 
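
Note: the PowerOnVM_Task entries above (Invoking VirtualMachine.PowerOnVM_Task, then repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" polls until "completed successfully") follow oslo.vmware's invoke-then-wait task pattern. A minimal sketch of that pattern is shown below, assuming a pre-configured oslo.vmware VMwareAPISession; the host, credentials, and vm_ref are placeholders for illustration and are not taken from this log.

# Sketch only: the session parameters below are placeholders, not values from this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    host='vcenter.example.org',      # placeholder host
    server_username='user',          # placeholder credential
    server_password='secret',        # placeholder credential
    api_retry_count=3,
    task_poll_interval=0.5)

def power_on(vm_ref):
    # "Invoking VirtualMachine.PowerOnVM_Task" returns a task reference.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task polls the task (the "progress is N%" DEBUG lines) until it
    # finishes, returning on success or raising if the task errors out.
    session.wait_for_task(task)
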
[ 1104.081335] env[69475]: DEBUG nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1104.084423] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3446efef-105f-48b2-bef4-39dd91a416c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.228801] env[69475]: DEBUG oslo_concurrency.lockutils [req-e49b4bbf-8e17-4144-857e-07d1ee54cde3 req-e8c7e51e-8916-4d7a-b3df-911aebacd417 service nova] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.455658] env[69475]: DEBUG nova.compute.manager [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-deleted-e60a34f2-9926-41dc-a777-3d0e92f22ce9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.455941] env[69475]: INFO nova.compute.manager [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Neutron deleted interface e60a34f2-9926-41dc-a777-3d0e92f22ce9; detaching it from the instance and deleting it from the info cache [ 1104.456460] env[69475]: DEBUG nova.network.neutron [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "address": "fa:16:3e:8b:2b:05", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 
4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d4929eb-be", "ovs_interfaceid": "9d4929eb-bec9-43f8-9341-df239fb9a0a7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.514859] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.515125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.515337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "41ddf915-343b-46e4-834e-11ab3899242f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.515520] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.515688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.517342] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.517541] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.517725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.517902] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.518368] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.520080] env[69475]: INFO nova.compute.manager [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Terminating instance [ 1104.522317] env[69475]: INFO nova.compute.manager [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Terminating instance [ 1104.569370] env[69475]: DEBUG oslo_concurrency.lockutils [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Releasing lock "refresh_cache-8f18d683-7734-4798-8963-7336fe229f16" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1104.570376] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9124807c-a3dd-485f-a7eb-035e6dc5ae0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.577755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Resuming the VM {{(pid=69475) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1104.577996] env[69475]: DEBUG 
oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1ec0a68-dbca-4f85-992e-cfcf7e33cdb5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.583787] env[69475]: DEBUG oslo_vmware.api [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1104.583787] env[69475]: value = "task-3508887" [ 1104.583787] env[69475]: _type = "Task" [ 1104.583787] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.592054] env[69475]: DEBUG oslo_vmware.api [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508887, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.601859] env[69475]: INFO nova.compute.manager [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Took 20.48 seconds to build instance. [ 1104.735067] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1104.735878] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1da6fa7d-0094-45b4-9ee3-93c45ae80589 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.742658] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1104.742658] env[69475]: value = "task-3508888" [ 1104.742658] env[69475]: _type = "Task" [ 1104.742658] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.750935] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508888, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.756950] env[69475]: INFO nova.network.neutron [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Port e60a34f2-9926-41dc-a777-3d0e92f22ce9 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
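
Note: the paired "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held N s" DEBUG entries around the terminate_instance and compute_resources calls are emitted by oslo.concurrency's lockutils. A minimal sketch of that decorator pattern follows; the lock name and wrapped function are illustrative stand-ins, not Nova's actual code.

# Sketch only: illustrative use of the lockutils.synchronized decorator.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage(context, instance):
    # The named lock is held while this body runs; lockutils logs the
    # "acquired ... waited" and "released ... held" timings seen in this
    # log at DEBUG level.
    pass
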
[ 1104.757169] env[69475]: INFO nova.network.neutron [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Port 9d4929eb-bec9-43f8-9341-df239fb9a0a7 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1104.757494] env[69475]: DEBUG nova.network.neutron [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [{"id": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "address": "fa:16:3e:34:5a:a2", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.131", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d51ee71-84", "ovs_interfaceid": "9d51ee71-8419-4657-9a34-44bec2faf3c2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1104.959910] env[69475]: DEBUG oslo_concurrency.lockutils [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] Acquiring lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.969437] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.581s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.972377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.665s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.975355] env[69475]: INFO nova.compute.claims [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1105.024297] env[69475]: DEBUG nova.compute.manager [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.024538] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.025104] env[69475]: DEBUG nova.compute.manager [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1105.025289] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.026167] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8502f05b-8606-4168-bf89-436013ff269b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.029969] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-317b7b14-c1dd-4960-b85d-6d493cb23d50 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.035943] env[69475]: DEBUG nova.compute.manager [req-a6fdb272-c679-4823-b560-b4df3c74c39d req-5746790c-9739-4305-9a79-89bcad4c72b1 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-deleted-9d4929eb-bec9-43f8-9341-df239fb9a0a7 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1105.040510] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.040682] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.040889] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-05cf30cb-02de-4bbd-adc8-1b8b74d90430 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.042442] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-3fc26568-969b-4d90-99fa-368f5735be66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.049404] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1105.049404] env[69475]: value = "task-3508890" [ 1105.049404] env[69475]: _type = "Task" [ 1105.049404] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.050496] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1105.050496] env[69475]: value = "task-3508889" [ 1105.050496] env[69475]: _type = "Task" [ 1105.050496] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.062885] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.066283] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.094475] env[69475]: DEBUG oslo_vmware.api [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508887, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.104137] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d7c54fda-0695-4558-a55c-c7027e1791f7 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.997s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.253459] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1105.253695] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1105.253898] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1105.254749] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03897cc5-d5e9-4949-9143-ff1e1e0df2e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.260316] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1105.279191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3e471324-1c08-4ba0-b285-cba5f5b1aa05 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35-e60a34f2-9926-41dc-a777-3d0e92f22ce9" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.215s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1105.280821] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c45217-d1eb-43d6-a16f-cc3f0f143494 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.288669] env[69475]: WARNING nova.virt.vmwareapi.driver [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1105.288969] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1105.289802] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a95b8cc5-d26b-4647-9c9a-142751fb385d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.298440] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.299478] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4473578b-00f6-4796-a8e0-5a9a9d4d2fda {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.369840] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.370082] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.370509] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.370598] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1eaa232-8396-461f-b80a-4bcc34814aca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.377291] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1105.377291] env[69475]: value = "task-3508892" [ 1105.377291] env[69475]: _type = "Task" [ 1105.377291] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.385515] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.538355] env[69475]: INFO nova.scheduler.client.report [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted allocation for migration 4fac22fe-611e-4785-b07c-06b01264b8f7 [ 1105.562942] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508890, 'name': PowerOffVM_Task, 'duration_secs': 0.305598} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.566481] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.566684] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.566955] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508889, 'name': PowerOffVM_Task, 'duration_secs': 0.260897} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.567189] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b52512f-cf92-494f-be5b-01df6aca43d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.568889] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1105.569093] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1105.569379] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94cb32d6-7076-4c49-a4e4-cf03dc4b3a2a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.594709] env[69475]: DEBUG oslo_vmware.api [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508887, 'name': PowerOnVM_Task, 'duration_secs': 0.935608} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.594837] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Resumed the VM {{(pid=69475) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1105.595033] env[69475]: DEBUG nova.compute.manager [None req-623fcd89-2122-40c7-b484-5cc1c2b18e47 tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.595875] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ce2029-4187-41d5-a209-4d039a5815b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.662626] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.662882] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.663130] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleting the datastore file [datastore2] 41ddf915-343b-46e4-834e-11ab3899242f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.663652] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ce65ff5-9d47-4751-b0ac-604dbcc9b9cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.672020] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1105.672020] env[69475]: value = "task-3508895" [ 1105.672020] env[69475]: _type = "Task" [ 1105.672020] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.682540] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1105.682780] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1105.682972] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleting the datastore file [datastore1] 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1105.683245] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a9c54e2f-2a99-41aa-9752-0630c0094ec8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.688767] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508895, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.693522] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1105.693522] env[69475]: value = "task-3508896" [ 1105.693522] env[69475]: _type = "Task" [ 1105.693522] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.704521] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508896, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.886559] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.405434} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.886893] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1105.887365] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1105.887365] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1105.908749] env[69475]: INFO nova.compute.manager [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Rebuilding instance [ 1105.952585] env[69475]: DEBUG nova.compute.manager [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1105.953466] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e765220-dae5-4853-a061-a940d484c56e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.044846] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f55b49ef-2f4c-4ef0-8291-334afb6ff59e tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 16.759s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.187106] env[69475]: DEBUG oslo_vmware.api [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508895, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.371119} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.187373] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.187552] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.187730] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.187902] env[69475]: INFO nova.compute.manager [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1106.188302] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.188518] env[69475]: DEBUG nova.compute.manager [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.188676] env[69475]: DEBUG nova.network.neutron [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.203285] env[69475]: DEBUG oslo_vmware.api [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.443547} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.203519] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1106.203692] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1106.203859] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1106.204040] env[69475]: INFO nova.compute.manager [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1106.204273] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1106.204466] env[69475]: DEBUG nova.compute.manager [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1106.204565] env[69475]: DEBUG nova.network.neutron [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1106.271577] env[69475]: DEBUG neutronclient.v2_0.client [-] Error message: {"NeutronError": {"type": "PortNotFound", "message": "Port 9d4929eb-bec9-43f8-9341-df239fb9a0a7 could not be found.", "detail": ""}} {{(pid=69475) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1106.272969] env[69475]: DEBUG nova.network.neutron [-] Unable to show port 9d4929eb-bec9-43f8-9341-df239fb9a0a7 as it no longer exists. 
{{(pid=69475) _unbind_ports /opt/stack/nova/nova/network/neutron.py:666}} [ 1106.333357] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bcf5644-c594-4004-9fe1-aa428fc5d5b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.341418] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ebf9b34-f0f4-4f56-b484-4685c09aa4b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.372838] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2855b66-20d5-40e3-89af-1a5180728854 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.381560] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7891242-0f36-4c65-87d9-32c20b49945c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.399017] env[69475]: INFO nova.virt.block_device [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Booting with volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a at /dev/sdb [ 1106.399218] env[69475]: DEBUG nova.compute.provider_tree [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1106.446614] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-80c0d35f-5a41-4654-815f-421fb83047a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.455880] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64606479-1c1f-4d96-b68f-6a750333bda3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.492666] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0daa292c-f0a5-4f73-a002-8335971ccecd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.501130] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638dc3ec-b003-4534-b1b4-91a0f6f668b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.534738] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d560dbf-12e3-42ad-97fe-9482a1a53e90 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.541682] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eae08bf6-951d-4c72-b7e6-9e3a81982f4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.557715] env[69475]: DEBUG nova.virt.block_device [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating existing volume attachment record: f9b72cca-e07f-4553-a077-12ed9beb0c74 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1106.766339] env[69475]: DEBUG nova.compute.manager [req-b581efb1-f68e-41af-bf0c-2e759b38e9d6 req-bd50b09c-1c9e-4355-8b1b-c1d938383720 service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Received event network-vif-deleted-f493e873-49de-4112-9562-cbb7d23892c8 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.766555] env[69475]: INFO nova.compute.manager [req-b581efb1-f68e-41af-bf0c-2e759b38e9d6 req-bd50b09c-1c9e-4355-8b1b-c1d938383720 service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Neutron deleted interface f493e873-49de-4112-9562-cbb7d23892c8; detaching it from the instance and deleting it from the info cache [ 1106.767329] env[69475]: DEBUG nova.network.neutron [req-b581efb1-f68e-41af-bf0c-2e759b38e9d6 req-bd50b09c-1c9e-4355-8b1b-c1d938383720 service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1106.774307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.774563] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.774774] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.774959] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.775144] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.777491] env[69475]: INFO nova.compute.manager [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Terminating instance [ 1106.924891] env[69475]: ERROR nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [req-d100323e-c2af-49bb-99f3-b1a77cb5fc43] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d100323e-c2af-49bb-99f3-b1a77cb5fc43"}]} [ 1106.948298] env[69475]: DEBUG nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1106.963672] env[69475]: DEBUG nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1106.963904] env[69475]: DEBUG nova.compute.provider_tree [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 88, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1106.972416] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.973683] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f904f6bc-1110-4a36-97f4-2973013de9a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.979452] env[69475]: DEBUG nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1106.988604] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1106.988604] env[69475]: value = "task-3508897" [ 1106.988604] env[69475]: _type = "Task" [ 1106.988604] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.997790] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508897, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.007353] env[69475]: DEBUG nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1107.073299] env[69475]: DEBUG nova.compute.manager [req-234788d6-f6e7-4f16-87fe-0f4dd9869922 req-b4c38548-1778-48f1-b0c8-d182c3522d13 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Received event network-vif-deleted-9d51ee71-8419-4657-9a34-44bec2faf3c2 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1107.073560] env[69475]: INFO nova.compute.manager [req-234788d6-f6e7-4f16-87fe-0f4dd9869922 req-b4c38548-1778-48f1-b0c8-d182c3522d13 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Neutron deleted interface 9d51ee71-8419-4657-9a34-44bec2faf3c2; detaching it from the instance and deleting it from the info cache [ 1107.073780] env[69475]: DEBUG nova.network.neutron [req-234788d6-f6e7-4f16-87fe-0f4dd9869922 req-b4c38548-1778-48f1-b0c8-d182c3522d13 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.200780] env[69475]: DEBUG nova.network.neutron [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.270811] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e94fa742-8e7f-4d8b-a9ed-bc24766bfec1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.284243] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b91b7b3-2f1e-41d4-ac4f-fbfa2ed1302c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.299104] env[69475]: DEBUG nova.compute.manager [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1107.299104] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1107.299430] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379f6c60-00f7-4311-b16a-3c83825ec764 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.302817] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dd691e-0ec2-484e-aea4-9ba1b7e90a3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.312075] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf438737-505d-4918-aedb-6ceab5275e29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.315101] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1107.315336] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-99c067c3-7663-4ea3-871c-1fb19ea73073 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.327347] env[69475]: DEBUG nova.compute.manager [req-b581efb1-f68e-41af-bf0c-2e759b38e9d6 req-bd50b09c-1c9e-4355-8b1b-c1d938383720 service nova] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Detach interface failed, port_id=f493e873-49de-4112-9562-cbb7d23892c8, reason: Instance 41ddf915-343b-46e4-834e-11ab3899242f could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1107.360119] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ad751a-0286-4aa8-840b-db45b4ff661a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.362900] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1107.362900] env[69475]: value = "task-3508898" [ 1107.362900] env[69475]: _type = "Task" [ 1107.362900] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.370560] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00d9b4ac-6450-437d-a4d7-a420161e4a45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.377321] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508898, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.388987] env[69475]: DEBUG nova.compute.provider_tree [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1107.414822] env[69475]: DEBUG nova.network.neutron [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.496777] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508897, 'name': PowerOffVM_Task, 'duration_secs': 0.159053} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.497076] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.497315] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1107.498548] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09286711-b1c8-40d4-901f-2b1588915ef0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.505058] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1107.505310] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22b17f2e-5e60-4c5f-acbf-385ea41f2991 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.534486] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1107.534486] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1107.534486] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Deleting the datastore file [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.534486] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2955ee0e-d29e-4736-b564-a362da242177 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.545276] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1107.545276] env[69475]: value = "task-3508900" [ 1107.545276] env[69475]: _type = "Task" [ 1107.545276] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.553873] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.579448] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d35693e4-2ef2-43d2-a158-189778329ba2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.587165] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06dd6e21-59f6-4e22-9d3e-603580d2510f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.628129] env[69475]: DEBUG nova.compute.manager [req-234788d6-f6e7-4f16-87fe-0f4dd9869922 req-b4c38548-1778-48f1-b0c8-d182c3522d13 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Detach interface failed, port_id=9d51ee71-8419-4657-9a34-44bec2faf3c2, reason: Instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1107.703712] env[69475]: INFO nova.compute.manager [-] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Took 1.51 seconds to deallocate network for instance. [ 1107.872519] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508898, 'name': PowerOffVM_Task, 'duration_secs': 0.362149} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.872817] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.872985] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1107.873269] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-862d5102-30c5-4325-9128-2f3fd899242e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.917978] env[69475]: INFO nova.compute.manager [-] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Took 1.71 seconds to deallocate network for instance. 
[ 1107.920650] env[69475]: DEBUG nova.scheduler.client.report [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 154 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1107.920892] env[69475]: DEBUG nova.compute.provider_tree [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 154 to 155 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1107.921106] env[69475]: DEBUG nova.compute.provider_tree [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1107.946123] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1107.946123] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1107.946511] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleting the datastore file [datastore2] baf27027-678d-4167-bb9b-df410aeb0e82 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.946511] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27c122a9-3bb2-4e1e-b5b1-8eac548a4c94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.953346] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: 
(returnval){ [ 1107.953346] env[69475]: value = "task-3508902" [ 1107.953346] env[69475]: _type = "Task" [ 1107.953346] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.963264] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.053653] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129285} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.053907] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.054183] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1108.054378] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1108.210596] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.427881] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.455s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.428433] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1108.431254] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.691s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.432722] env[69475]: INFO nova.compute.claims [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.435493] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1108.462707] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.707814] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1108.708142] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1108.708350] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1108.708586] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1108.708780] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1108.708974] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1108.709259] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1108.709466] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1108.709684] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1108.709892] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1108.710127] env[69475]: DEBUG nova.virt.hardware [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1108.711375] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-721346a5-5639-4195-92b4-91e8c5ae154a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.720318] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-741766df-2b21-412d-91db-41265ed51373 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.735774] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:dc:db:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'fbde5d12-5376-4f30-a0eb-1e63c7d36242', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.743635] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.743906] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.744146] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ec047342-7224-4eda-ab02-b347bd112930 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.765023] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.765023] env[69475]: value = "task-3508903" [ 1108.765023] env[69475]: _type = "Task" [ 1108.765023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.774513] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508903, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.938185] env[69475]: DEBUG nova.compute.utils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1108.942519] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1108.942693] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1108.964665] env[69475]: DEBUG oslo_vmware.api [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.531951} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.965021] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1108.965098] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1108.965280] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1108.965458] env[69475]: INFO nova.compute.manager [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1108.965690] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.965965] env[69475]: DEBUG nova.compute.manager [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1108.965965] env[69475]: DEBUG nova.network.neutron [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1108.993244] env[69475]: DEBUG nova.policy [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f164f821924c4f4aae565d7352fef4a0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f8ffeef220f04d9eb22ef69b68e9c34a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.089562] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1109.089819] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1109.089977] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1109.090802] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1109.091141] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} 
[ 1109.091319] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1109.091536] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1109.091715] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1109.091895] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1109.092081] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1109.092286] env[69475]: DEBUG nova.virt.hardware [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1109.093623] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3fcd954-9987-4c62-ab6e-1855d6f0ac0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.102185] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef57920-f04f-411b-ac85-61f6b69e1fd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.117721] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1109.123765] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1109.124114] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1109.124344] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aabea363-1eec-47f2-93fd-0e1f07e1a043 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.142928] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1109.142928] env[69475]: value = "task-3508904" [ 1109.142928] env[69475]: _type = "Task" [ 1109.142928] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.151754] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508904, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.275655] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508903, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.332587] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Successfully created port: 75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1109.443192] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1109.660412] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508904, 'name': CreateVM_Task, 'duration_secs': 0.484536} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.660648] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1109.661183] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.661255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.661607] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1109.661841] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1410ad67-039b-4000-b0dd-87a78f127684 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.669236] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1109.669236] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f44c55-61b3-36ab-0f9b-3deba7167301" [ 1109.669236] env[69475]: _type = "Task" [ 1109.669236] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.680022] env[69475]: DEBUG nova.compute.manager [req-ae4fe6e7-7e68-4bf8-ae46-6117d8e5c6f9 req-9bfe5325-0e1c-436a-8c33-a9562e35062d service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Received event network-vif-deleted-4059da75-efc8-42ee-90b1-8202220d1621 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1109.680248] env[69475]: INFO nova.compute.manager [req-ae4fe6e7-7e68-4bf8-ae46-6117d8e5c6f9 req-9bfe5325-0e1c-436a-8c33-a9562e35062d service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Neutron deleted interface 4059da75-efc8-42ee-90b1-8202220d1621; detaching it from the instance and deleting it from the info cache [ 1109.680435] env[69475]: DEBUG nova.network.neutron [req-ae4fe6e7-7e68-4bf8-ae46-6117d8e5c6f9 req-9bfe5325-0e1c-436a-8c33-a9562e35062d service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.687594] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f44c55-61b3-36ab-0f9b-3deba7167301, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.752947] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219da50e-552f-4b31-bf15-e09ce76559ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.761433] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc27ee3-5d2a-4c97-9614-1549895e1168 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.797647] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-374e3bd8-3c95-44c9-83c7-f283f384db76 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.802273] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508903, 'name': CreateVM_Task, 'duration_secs': 0.533143} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.802912] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1109.807021] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.808058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98377f4d-0566-4c91-babf-a89cd2fc31db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.823633] env[69475]: DEBUG nova.compute.provider_tree [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.154684] env[69475]: DEBUG nova.network.neutron [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.183011] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f44c55-61b3-36ab-0f9b-3deba7167301, 'name': SearchDatastore_Task, 'duration_secs': 0.019523} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.183265] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94b5a72e-9c27-4f9a-8e20-098e94b1d5a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.185331] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.185579] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.185801] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.185947] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.186141] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1110.186414] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.186696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1110.186897] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f8023e4-c2c7-476e-a5f7-14a2aca06573 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.188816] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-b0131ced-dccc-4fc0-9960-ab330f3f980d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.195082] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66faa18-dba4-419a-92c4-575815b0a35a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.207303] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1110.207481] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1110.208261] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1110.208261] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5206eb3d-3b97-a713-85e7-1d6e63ecaefb" [ 1110.208261] env[69475]: _type = "Task" [ 1110.208261] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.208774] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f49fb9-27b2-49c6-b2c5-57c42290e315 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.216394] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1110.216394] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaf672-6335-848d-fd7c-eb9cbc543883" [ 1110.216394] env[69475]: _type = "Task" [ 1110.216394] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.233656] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5206eb3d-3b97-a713-85e7-1d6e63ecaefb, 'name': SearchDatastore_Task, 'duration_secs': 0.02473} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.233955] env[69475]: DEBUG nova.compute.manager [req-ae4fe6e7-7e68-4bf8-ae46-6117d8e5c6f9 req-9bfe5325-0e1c-436a-8c33-a9562e35062d service nova] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Detach interface failed, port_id=4059da75-efc8-42ee-90b1-8202220d1621, reason: Instance baf27027-678d-4167-bb9b-df410aeb0e82 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1110.234785] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.235049] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1110.235266] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.238978] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaf672-6335-848d-fd7c-eb9cbc543883, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.331093] env[69475]: DEBUG nova.scheduler.client.report [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.458397] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1110.486997] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1110.487262] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1110.487421] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1110.487602] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1110.487752] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1110.487900] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1110.488124] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1110.488288] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1110.488453] env[69475]: DEBUG nova.virt.hardware [None 
req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1110.488612] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1110.488784] env[69475]: DEBUG nova.virt.hardware [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1110.489651] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83635959-3626-4baa-a4f3-dd395972bf2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.497365] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c662b67-cea0-4dc8-aaaa-4a238235dce5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.657165] env[69475]: INFO nova.compute.manager [-] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Took 1.69 seconds to deallocate network for instance. [ 1110.727337] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52eaf672-6335-848d-fd7c-eb9cbc543883, 'name': SearchDatastore_Task, 'duration_secs': 0.035176} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.729067] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cae8b36a-60d1-4fea-a7dd-7c2cd38903b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.733927] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1110.733927] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e7a1d-eacc-d2cd-09bb-3b0ab6b3c484" [ 1110.733927] env[69475]: _type = "Task" [ 1110.733927] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.741208] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e7a1d-eacc-d2cd-09bb-3b0ab6b3c484, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.836302] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.836807] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1110.839743] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.277s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.841116] env[69475]: INFO nova.compute.claims [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1111.098354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.098354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.098354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "8f18d683-7734-4798-8963-7336fe229f16-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.098354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.098354] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.100122] env[69475]: INFO nova.compute.manager [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Terminating instance [ 1111.149201] env[69475]: DEBUG nova.compute.manager [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Received event network-vif-plugged-75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1111.149426] env[69475]: DEBUG oslo_concurrency.lockutils [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.149638] env[69475]: DEBUG oslo_concurrency.lockutils [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.149801] env[69475]: DEBUG oslo_concurrency.lockutils [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.149994] env[69475]: DEBUG nova.compute.manager [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] No waiting events found dispatching network-vif-plugged-75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1111.150189] env[69475]: WARNING nova.compute.manager [req-b77b4123-3689-47ef-bbfc-2f78752b7d3a req-88949f7b-044d-47d1-8917-4ed8f1c830eb service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Received unexpected event network-vif-plugged-75df31f7-58d6-423b-80c5-e46458f30a93 for instance with vm_state building and task_state spawning. 
[ 1111.164147] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.212641] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Successfully updated port: 75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1111.248296] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e7a1d-eacc-d2cd-09bb-3b0ab6b3c484, 'name': SearchDatastore_Task, 'duration_secs': 0.009865} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.248565] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.248832] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1111.249415] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.249618] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1111.249838] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1181202-ae39-4205-b0a6-354646fa85b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.252528] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c58c7e82-c31f-4d58-a18a-824e5f0ee333 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1111.259662] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1111.259662] env[69475]: value = "task-3508905" [ 1111.259662] env[69475]: _type = "Task" [ 1111.259662] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.263665] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1111.263838] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1111.264843] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78b281b0-4191-4daf-bac2-b5d2e86ed957 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.269598] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508905, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.272420] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1111.272420] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523128ba-dc20-569b-88bf-aaab6881fe5b" [ 1111.272420] env[69475]: _type = "Task" [ 1111.272420] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.280139] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523128ba-dc20-569b-88bf-aaab6881fe5b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.350053] env[69475]: DEBUG nova.compute.utils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.350426] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Not allocating networking since 'none' was specified. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1111.604292] env[69475]: DEBUG nova.compute.manager [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1111.604585] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1111.605535] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cefd01f9-038e-488f-8664-be5cced04fae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.613626] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.613923] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d658770f-23a8-4f3e-916a-ce233c0ac8f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.620798] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1111.620798] env[69475]: value = "task-3508906" [ 1111.620798] env[69475]: _type = "Task" [ 1111.620798] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.629132] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508906, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.715761] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1111.715939] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.716131] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1111.768987] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508905, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431536} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.769882] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.769882] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.769882] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4f231bde-1bd1-4b21-9893-7de663d768fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.777474] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1111.777474] env[69475]: value = "task-3508907" [ 1111.777474] env[69475]: _type = "Task" [ 1111.777474] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.785289] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523128ba-dc20-569b-88bf-aaab6881fe5b, 'name': SearchDatastore_Task, 'duration_secs': 0.007892} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.786592] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-241a0330-edad-4108-9d2d-ba5a72eeeeea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.792074] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508907, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.795147] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1111.795147] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5236e068-4bee-bbfd-a442-e85fe829f00d" [ 1111.795147] env[69475]: _type = "Task" [ 1111.795147] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.802893] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5236e068-4bee-bbfd-a442-e85fe829f00d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.854603] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.067514] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef1d5f3-b198-4f84-be8e-07d040958d0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.075345] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-806b3778-28d1-4440-b217-2229e793e9a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.106334] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d3a6b79-3df2-4eac-9cac-e27657d012b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.114362] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6073b55b-942c-42ab-a1a3-322449aa058e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.128069] env[69475]: DEBUG nova.compute.provider_tree [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.136974] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508906, 'name': PowerOffVM_Task, 'duration_secs': 0.20927} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.137833] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.138034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.138271] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c667bcc0-5f34-422f-848f-097aa5f8674e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.246343] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1112.292068] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508907, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061467} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.292341] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.293146] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bd99b1-46b9-476d-9855-269c1c2228bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.303465] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5236e068-4bee-bbfd-a442-e85fe829f00d, 'name': SearchDatastore_Task, 'duration_secs': 0.00966} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.310075] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.310349] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1112.324048] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Reconfiguring VM instance instance-0000006d to attach disk [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.326672] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72b86565-94c3-414a-924c-c1985bdf77d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.328476] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce7b7147-a502-4550-85b5-6ae7dcc5944d {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.347885] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1112.347885] env[69475]: value = "task-3508909" [ 1112.347885] env[69475]: _type = "Task" [ 1112.347885] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.349172] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1112.349172] env[69475]: value = "task-3508910" [ 1112.349172] env[69475]: _type = "Task" [ 1112.349172] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.363355] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508909, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.366766] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508910, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.405315] env[69475]: DEBUG nova.network.neutron [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.634019] env[69475]: DEBUG nova.scheduler.client.report [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider 
dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1112.643018] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.643277] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.643469] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleting the datastore file [datastore1] 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.643758] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ee943a6e-68e6-46c9-bc0e-5c0eb4a229dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.650245] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for the task: (returnval){ [ 1112.650245] env[69475]: value = "task-3508911" [ 1112.650245] env[69475]: _type = "Task" [ 1112.650245] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.659398] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508911, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.863318] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508910, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.866798] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508909, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.868057] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1112.899005] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1112.899349] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1112.899576] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1112.899814] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1112.899979] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1112.900212] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1112.900441] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1112.900652] env[69475]: DEBUG 
nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1112.900851] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1112.901038] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1112.901277] env[69475]: DEBUG nova.virt.hardware [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1112.902347] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fff2329-98c8-4140-8148-57e54f85b481 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.907897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.908311] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Instance network_info: |[{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1112.911266] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:84:d9:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b80dd748-3d7e-4a23-a38d-9e79a3881452', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75df31f7-58d6-423b-80c5-e46458f30a93', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.920311] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.921150] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.922525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87558c44-073f-49ab-b096-683d7edc45b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.927636] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e0f8484-fbc9-4e95-a572-0181a3da849c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.959704] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.966029] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Creating folder: Project (21f1e6c4906f442c98c4348651b1bdc4). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1112.967871] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a637637-de17-4779-8c7b-8b0ffbe4e625 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.969949] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1112.969949] env[69475]: value = "task-3508912" [ 1112.969949] env[69475]: _type = "Task" [ 1112.969949] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.979650] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508912, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.981070] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Created folder: Project (21f1e6c4906f442c98c4348651b1bdc4) in parent group-v700823. [ 1112.981430] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Creating folder: Instances. Parent ref: group-v701124. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1112.981733] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-779fc7fb-1e23-4ec0-ac8d-b16f6782c784 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.993256] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Created folder: Instances in parent group-v701124. [ 1112.993527] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.993749] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.993958] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1036b327-493d-4aee-bb17-c58ffb94ff8f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.011323] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.011323] env[69475]: value = "task-3508915" [ 1113.011323] env[69475]: _type = "Task" [ 1113.011323] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.020109] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508915, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.139592] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.140969] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1113.142872] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.248s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.143425] env[69475]: DEBUG nova.objects.instance [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lazy-loading 'resources' on Instance uuid 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1113.160671] env[69475]: DEBUG oslo_vmware.api [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Task: {'id': task-3508911, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.382489} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.160913] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.161113] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.161292] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.161463] env[69475]: INFO nova.compute.manager [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Took 1.56 seconds to destroy the instance on the hypervisor. [ 1113.161697] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.161885] env[69475]: DEBUG nova.compute.manager [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1113.161980] env[69475]: DEBUG nova.network.neutron [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1113.234700] env[69475]: DEBUG nova.compute.manager [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Received event network-changed-75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.234700] env[69475]: DEBUG nova.compute.manager [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Refreshing instance network info cache due to event network-changed-75df31f7-58d6-423b-80c5-e46458f30a93. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.234700] env[69475]: DEBUG oslo_concurrency.lockutils [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] Acquiring lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.234832] env[69475]: DEBUG oslo_concurrency.lockutils [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] Acquired lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.236056] env[69475]: DEBUG nova.network.neutron [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Refreshing network info cache for port 75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.366326] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508909, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.74445} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.369739] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1113.369954] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1113.370395] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508910, 'name': ReconfigVM_Task, 'duration_secs': 0.815838} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.370679] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5b03499f-74fc-442f-a00c-e7d4e997cc4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.372928] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Reconfigured VM instance instance-0000006d to attach disk [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c/74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1113.373547] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6364edc-692f-41e0-998b-b32b1fa3a939 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.379861] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1113.379861] env[69475]: value = "task-3508916" [ 1113.379861] env[69475]: _type = "Task" [ 1113.379861] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.381195] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1113.381195] env[69475]: value = "task-3508917" [ 1113.381195] env[69475]: _type = "Task" [ 1113.381195] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.391962] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508916, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.395217] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508917, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.480338] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508912, 'name': CreateVM_Task, 'duration_secs': 0.422066} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.480507] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.481383] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.481542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.481859] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1113.482272] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2b6e53b-60e0-49dc-afce-7e4fb040b2ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.486760] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1113.486760] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5204aca8-a766-ddbd-acfa-ad54b11b1638" [ 1113.486760] env[69475]: _type = "Task" [ 1113.486760] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.494223] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5204aca8-a766-ddbd-acfa-ad54b11b1638, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.519882] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508915, 'name': CreateVM_Task, 'duration_secs': 0.298288} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.520041] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1113.520404] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.646760] env[69475]: DEBUG nova.compute.utils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1113.651586] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1113.651824] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1113.706885] env[69475]: DEBUG nova.policy [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1113.869244] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78958237-c682-43cb-a2b6-41e796c86f70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.877045] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-467de955-94e5-4694-bda0-04a1f545d82e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.914282] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f6c780a-1a5a-44fc-a62d-07cfc13ffdb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.919529] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508916, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065982} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.920752] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1113.921468] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6597d1b3-532a-4ebf-963d-e0aefb15d0f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.928317] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508917, 'name': Rename_Task, 'duration_secs': 0.130241} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.929740] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20225ed9-0051-4d56-bc36-64c21107fbf7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.933540] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.944527] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-085be6b7-26e2-4f93-bf43-554c23865f87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.946221] env[69475]: DEBUG nova.network.neutron [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.958188] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1113.959191] env[69475]: DEBUG nova.network.neutron [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updated VIF entry in instance network info cache for port 75df31f7-58d6-423b-80c5-e46458f30a93. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1113.959365] env[69475]: DEBUG nova.network.neutron [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.964567] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-176fe241-f6b9-4212-a5b8-c4e16884c4d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.987007] env[69475]: DEBUG nova.compute.provider_tree [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1113.993887] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1113.993887] env[69475]: value = "task-3508918" [ 1113.993887] env[69475]: _type = "Task" [ 1113.993887] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.995706] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1113.995706] env[69475]: value = "task-3508919" [ 1113.995706] env[69475]: _type = "Task" [ 1113.995706] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.004696] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5204aca8-a766-ddbd-acfa-ad54b11b1638, 'name': SearchDatastore_Task, 'duration_secs': 0.01051} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.005477] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Successfully created port: 30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1114.007770] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.008011] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.008301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.008453] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.008637] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.012581] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.012896] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 
tempest-ServerShowV257Test-1712138627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1114.013163] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3115ab40-0d4e-4f24-947b-ba0340a8dea2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.015121] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508918, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.018254] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d62c3b6-5805-4248-ac9f-ae2a91522bff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.019942] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508919, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.024065] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1114.024065] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ed12fb-99f5-ff8e-e402-0d29ce762f9d" [ 1114.024065] env[69475]: _type = "Task" [ 1114.024065] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.028070] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.028267] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.029283] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3374fd2c-7c24-4a35-8c1c-1dfd350e073d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.034053] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ed12fb-99f5-ff8e-e402-0d29ce762f9d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.036874] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1114.036874] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ea770-b32e-5c02-d765-10a991542f7b" [ 1114.036874] env[69475]: _type = "Task" [ 1114.036874] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.043912] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ea770-b32e-5c02-d765-10a991542f7b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.152080] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1114.449152] env[69475]: INFO nova.compute.manager [-] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Took 1.29 seconds to deallocate network for instance. [ 1114.486748] env[69475]: DEBUG oslo_concurrency.lockutils [req-470248b2-5cb2-495a-ab75-b359f4ed12ec req-bc865502-61ae-4f1e-9454-e5aa8db94557 service nova] Releasing lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.490248] env[69475]: DEBUG nova.scheduler.client.report [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1114.506582] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508918, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.511453] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508919, 'name': ReconfigVM_Task, 'duration_secs': 0.438243} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.511702] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8/4f091501-351c-45b8-9f64-4d28d4623df8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1114.512872] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'device_type': 'disk', 'encryption_secret_uuid': None, 'size': 0, 'encryption_options': None, 'boot_index': 0, 'encrypted': False, 'device_name': '/dev/sda', 'guest_format': None, 'disk_bus': None, 'encryption_format': None, 'image_id': 'afa9d32c-9f39-44fb-bf3b-50d35842a59f'}], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'}, 'device_type': None, 'attachment_id': 'f9b72cca-e07f-4553-a077-12ed9beb0c74', 'mount_device': '/dev/sdb', 'delete_on_termination': False, 'boot_index': None, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69475) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1114.513099] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1114.513294] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1114.514028] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3663b0-af92-4b72-920f-d48bb3068054 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.533414] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b77584-d2fa-43b0-9ff0-e254d3d917af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.543174] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ed12fb-99f5-ff8e-e402-0d29ce762f9d, 'name': SearchDatastore_Task, 'duration_secs': 0.030111} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.555680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.555916] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1114.556139] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1114.563691] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1114.564274] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-38a68c1c-870b-488e-a1db-c009a3615663 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.580459] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ea770-b32e-5c02-d765-10a991542f7b, 'name': SearchDatastore_Task, 'duration_secs': 0.034824} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.581583] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b460b8b3-8e54-4796-bed5-d7d432617832 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.586147] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1114.586147] env[69475]: value = "task-3508920" [ 1114.586147] env[69475]: _type = "Task" [ 1114.586147] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.587426] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1114.587426] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521aa45a-445a-ccb7-1680-d7b4bc779421" [ 1114.587426] env[69475]: _type = "Task" [ 1114.587426] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.598019] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508920, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.600970] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521aa45a-445a-ccb7-1680-d7b4bc779421, 'name': SearchDatastore_Task, 'duration_secs': 0.008798} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.601248] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.601568] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1114.601892] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1114.602105] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1114.602363] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbb9f346-5c4e-4baa-a24d-5bbcfe26ca23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.604703] env[69475]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c9ca2c5-09a8-4492-8553-956b3c06784a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.611109] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1114.611109] env[69475]: value = "task-3508921" [ 1114.611109] env[69475]: _type = "Task" [ 1114.611109] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.614690] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1114.614868] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1114.615841] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fbd0643-c21e-4965-a655-3ac14cabb815 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.620891] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508921, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.623602] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1114.623602] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dfed1-b8b5-926c-f277-2d5407b327c8" [ 1114.623602] env[69475]: _type = "Task" [ 1114.623602] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.631102] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dfed1-b8b5-926c-f277-2d5407b327c8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.958354] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.994906] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1114.997866] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.787s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.998225] env[69475]: DEBUG nova.objects.instance [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'resources' on Instance uuid 41ddf915-343b-46e4-834e-11ab3899242f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.012484] env[69475]: DEBUG oslo_vmware.api [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508918, 'name': PowerOnVM_Task, 'duration_secs': 0.519145} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.012769] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1115.013169] env[69475]: DEBUG nova.compute.manager [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1115.013872] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4cf09e2-8452-4380-9a26-3de2b5ab19ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.020334] env[69475]: INFO nova.scheduler.client.report [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Deleted allocations for instance 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9 [ 1115.099298] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508920, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.120415] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508921, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.451968} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.120666] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1115.120883] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1115.121153] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5a0427a4-f9d8-493a-9503-8f9e1e23fda3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.128688] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1115.128688] env[69475]: value = "task-3508922" [ 1115.128688] env[69475]: _type = "Task" [ 1115.128688] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.135747] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522dfed1-b8b5-926c-f277-2d5407b327c8, 'name': SearchDatastore_Task, 'duration_secs': 0.008716} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.136838] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c676ec28-e628-4c04-bbeb-35e4726957d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.141980] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508922, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.145285] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1115.145285] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52927f10-fbbe-ba8c-6982-a4ccd2b55452" [ 1115.145285] env[69475]: _type = "Task" [ 1115.145285] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.153113] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52927f10-fbbe-ba8c-6982-a4ccd2b55452, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.161629] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1115.189325] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1115.189569] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1115.189728] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1115.189908] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1115.190086] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1115.190250] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1115.190462] env[69475]: DEBUG nova.virt.hardware [None 
req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1115.190623] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1115.190792] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1115.190954] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1115.191196] env[69475]: DEBUG nova.virt.hardware [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1115.192062] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfee400b-bed4-499e-ae54-d73704746229 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.199961] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2226e136-738f-4957-af97-1ce33ff73949 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.258605] env[69475]: DEBUG nova.compute.manager [req-4d03aa47-6b40-4e6f-8a64-1790096e32c5 req-140960c1-c4fd-4b02-9fea-7d1f59b61818 service nova] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Received event network-vif-deleted-0241fad0-a699-4ab6-8665-37a808867cd9 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.541380] env[69475]: DEBUG oslo_concurrency.lockutils [None req-191040b3-dfce-41e3-9595-98d64129cfbd tempest-ServerAddressesTestJSON-1752713486 tempest-ServerAddressesTestJSON-1752713486-project-member] Lock "60516e16-bd7e-4fc1-b95f-603fb5ef6ae9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.208s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.542614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.602781] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508920, 'name': ReconfigVM_Task, 'duration_secs': 0.556053} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.603054] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to attach disk [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1115.611097] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-817af9ac-3a22-4958-adef-4c06e943b558 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.629565] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1115.629565] env[69475]: value = "task-3508923" [ 1115.629565] env[69475]: _type = "Task" [ 1115.629565] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.637184] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Successfully updated port: 30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1115.646489] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508922, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065159} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.654501] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1115.655672] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508923, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.656660] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5629b896-c67d-4eb2-b539-ff1e4964ed34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.666746] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52927f10-fbbe-ba8c-6982-a4ccd2b55452, 'name': SearchDatastore_Task, 'duration_secs': 0.009646} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.678433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.678764] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1115.688215] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1115.688741] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3882417-15bc-4f86-8b78-5804fab9c384 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.690780] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8ff8acc-0d6a-4f9d-85a9-8015b631f700 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.713205] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1115.713205] env[69475]: value = "task-3508925" [ 1115.713205] env[69475]: _type = "Task" [ 1115.713205] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.714516] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1115.714516] env[69475]: value = "task-3508924" [ 1115.714516] env[69475]: _type = "Task" [ 1115.714516] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.728350] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.731344] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.823965] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9854a9f4-2ca9-4a7a-bd07-489f50a606da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.831865] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec71c7b1-6711-4c4e-873b-0993233be919 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.861886] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd770d56-523d-437e-9e25-1d7ac60e411a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.870692] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb05a085-7b74-4e4d-b2ac-23405346c20c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.884199] env[69475]: DEBUG nova.compute.provider_tree [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1116.066262] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.066518] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.066802] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1116.066941] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.067182] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.069560] env[69475]: INFO nova.compute.manager [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Terminating instance [ 1116.140069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.140069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.140373] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.141414] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508923, 'name': ReconfigVM_Task, 'duration_secs': 0.405727} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.141856] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1116.142438] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b34c48e5-8fdf-489f-87de-1e630f87dadb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.148696] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1116.148696] env[69475]: value = "task-3508926" [ 1116.148696] env[69475]: _type = "Task" [ 1116.148696] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.157025] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508926, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.230072] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.230468] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508924, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.390079] env[69475]: DEBUG nova.scheduler.client.report [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1116.573234] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "refresh_cache-74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.573519] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquired lock "refresh_cache-74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.575828] env[69475]: DEBUG nova.network.neutron [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1116.662739] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508926, 'name': Rename_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.685569] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1116.727540] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508925, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.731517] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508924, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.848085] env[69475]: DEBUG nova.network.neutron [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Updating instance_info_cache with network_info: [{"id": "30580d6c-38c0-4196-9332-ebfc62061eaa", "address": "fa:16:3e:7f:e4:e0", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30580d6c-38", "ovs_interfaceid": "30580d6c-38c0-4196-9332-ebfc62061eaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.895413] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.898s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.903532] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.464s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1116.903532] env[69475]: DEBUG nova.objects.instance [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'resources' on Instance uuid 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1116.923052] env[69475]: INFO nova.scheduler.client.report [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted allocations for instance 41ddf915-343b-46e4-834e-11ab3899242f [ 1117.102434] env[69475]: DEBUG nova.network.neutron [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1117.159884] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508926, 'name': Rename_Task, 'duration_secs': 0.622086} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.160186] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1117.160422] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc706302-f8dd-49b4-8a03-dc8bd4131cbd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.166574] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1117.166574] env[69475]: value = "task-3508927" [ 1117.166574] env[69475]: _type = "Task" [ 1117.166574] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.174306] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.176673] env[69475]: DEBUG nova.network.neutron [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.232794] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508924, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.030112} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.236428] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1117.236707] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1117.237067] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508925, 'name': ReconfigVM_Task, 'duration_secs': 1.128262} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.237332] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27b0b783-a8ec-4429-93d7-6f6e7a54230e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.239596] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1117.240252] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48852d1f-aca0-40bc-8799-7110be64ce29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.246675] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1117.246675] env[69475]: value = "task-3508928" [ 1117.246675] env[69475]: _type = "Task" [ 1117.246675] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.248043] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1117.248043] env[69475]: value = "task-3508929" [ 1117.248043] env[69475]: _type = "Task" [ 1117.248043] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.258881] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508928, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.262134] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508929, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.295149] env[69475]: DEBUG nova.compute.manager [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Received event network-vif-plugged-30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.295317] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Acquiring lock "cc85e976-78cf-4289-9674-d697630e7775-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1117.295525] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Lock "cc85e976-78cf-4289-9674-d697630e7775-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1117.295947] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Lock "cc85e976-78cf-4289-9674-d697630e7775-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.296564] env[69475]: DEBUG nova.compute.manager [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] No waiting events found dispatching network-vif-plugged-30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1117.296761] env[69475]: WARNING nova.compute.manager [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Received unexpected event network-vif-plugged-30580d6c-38c0-4196-9332-ebfc62061eaa for instance with vm_state building and task_state spawning. 
[ 1117.296821] env[69475]: DEBUG nova.compute.manager [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Received event network-changed-30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.296975] env[69475]: DEBUG nova.compute.manager [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Refreshing instance network info cache due to event network-changed-30580d6c-38c0-4196-9332-ebfc62061eaa. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.297290] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Acquiring lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.350483] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.350832] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Instance network_info: |[{"id": "30580d6c-38c0-4196-9332-ebfc62061eaa", "address": "fa:16:3e:7f:e4:e0", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30580d6c-38", "ovs_interfaceid": "30580d6c-38c0-4196-9332-ebfc62061eaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1117.351170] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Acquired lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.351356] env[69475]: DEBUG nova.network.neutron [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Refreshing network info cache for port 
30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.352701] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:e4:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '30580d6c-38c0-4196-9332-ebfc62061eaa', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1117.361102] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1117.362117] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1117.362733] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c67824aa-b9ef-4e3b-9ddc-825d44d10e06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.385513] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1117.385513] env[69475]: value = "task-3508930" [ 1117.385513] env[69475]: _type = "Task" [ 1117.385513] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.393684] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508930, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.432884] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b2497b40-7649-442f-b476-ae2368ae0822 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "41ddf915-343b-46e4-834e-11ab3899242f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.917s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1117.626978] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82787554-a498-4908-a942-3d376df766eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.635665] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a7d273-8c23-4869-960b-07cc01d1afeb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.675510] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b31c363-12af-48a2-944b-1155f2540f1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.683351] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Releasing lock "refresh_cache-74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.683763] env[69475]: DEBUG nova.compute.manager [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1117.683955] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1117.684252] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508927, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.686777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcdd2804-1d03-42c4-9a40-9af645a4b976 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.690233] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1bfe4e1-9cdc-4ca0-ac74-09914b288f64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.698044] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1117.705761] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba3f7e87-a83d-4f50-82c0-58fba9864868 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.707645] env[69475]: DEBUG nova.compute.provider_tree [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.714214] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1117.714214] env[69475]: value = "task-3508931" [ 1117.714214] env[69475]: _type = "Task" [ 1117.714214] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.722731] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508931, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.760454] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508928, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127607} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.763816] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1117.764288] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508929, 'name': Rename_Task, 'duration_secs': 0.251499} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.765215] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa82fe9b-be14-4c30-a6fa-379ecdc347b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.768142] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1117.768366] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-baf20193-5258-4383-8d2a-c9ddbf381986 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.790718] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1117.792828] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b1a58fc-85ca-4e76-9bda-b63aa8be7354 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.811170] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1117.811170] env[69475]: value = "task-3508932" [ 1117.811170] env[69475]: _type = "Task" [ 1117.811170] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.817027] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1117.817027] env[69475]: value = "task-3508933" [ 1117.817027] env[69475]: _type = "Task" [ 1117.817027] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.824205] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508932, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.830120] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508933, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.897042] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508930, 'name': CreateVM_Task, 'duration_secs': 0.359971} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.897306] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.898125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.898252] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.898588] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1117.898862] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-580f15cd-615d-4db1-8231-d166bdc1ddb9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.905487] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1117.905487] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e462b-398d-d2ee-acb4-9091a7a08a1c" [ 1117.905487] env[69475]: _type = "Task" [ 1117.905487] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.913121] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e462b-398d-d2ee-acb4-9091a7a08a1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.176053] env[69475]: DEBUG nova.network.neutron [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Updated VIF entry in instance network info cache for port 30580d6c-38c0-4196-9332-ebfc62061eaa. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.176503] env[69475]: DEBUG nova.network.neutron [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Updating instance_info_cache with network_info: [{"id": "30580d6c-38c0-4196-9332-ebfc62061eaa", "address": "fa:16:3e:7f:e4:e0", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap30580d6c-38", "ovs_interfaceid": "30580d6c-38c0-4196-9332-ebfc62061eaa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.182729] env[69475]: DEBUG oslo_vmware.api [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3508927, 'name': PowerOnVM_Task, 'duration_secs': 0.721742} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.183062] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.183311] env[69475]: DEBUG nova.compute.manager [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.184226] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c93bc716-318a-497e-bdc6-bfed072329ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.211085] env[69475]: DEBUG nova.scheduler.client.report [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1118.226559] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508931, 'name': PowerOffVM_Task, 'duration_secs': 0.118216} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.226559] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1118.226559] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1118.226559] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a59fd938-045b-4168-bdf0-b48f94ff2885 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.252023] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1118.252023] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1118.252023] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Deleting the datastore file [datastore2] 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1118.252023] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2865cea5-0de3-4a5a-954e-0190b6bf175b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.260591] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for the task: (returnval){ [ 1118.260591] env[69475]: value = "task-3508935" [ 1118.260591] env[69475]: _type = "Task" [ 1118.260591] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.269453] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.321901] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508932, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.330292] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508933, 'name': ReconfigVM_Task, 'duration_secs': 0.457745} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.332031] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.332031] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6bd77e43-1281-48db-977e-2c2c555515a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.337894] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1118.337894] env[69475]: value = "task-3508936" [ 1118.337894] env[69475]: _type = "Task" [ 1118.337894] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.346063] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508936, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.416058] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e462b-398d-d2ee-acb4-9091a7a08a1c, 'name': SearchDatastore_Task, 'duration_secs': 0.061348} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.416226] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.416502] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1118.416792] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1118.416949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1118.417166] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1118.421034] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfe5eea7-9fe4-43b2-bb18-82979f0c2ae7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.428489] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1118.428710] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1118.429490] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb9940b6-d93d-464d-a890-e3cf1eb59b9e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.434728] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1118.434728] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5dffb-8755-5555-adee-e76c7a8575a6" [ 1118.434728] env[69475]: _type = "Task" [ 1118.434728] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.443008] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5dffb-8755-5555-adee-e76c7a8575a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.679750] env[69475]: DEBUG oslo_concurrency.lockutils [req-cdfcd4cd-f341-4b05-88b7-bcf97cef8c1a req-e14ee1f4-9b93-42c0-9b89-dd7a27894156 service nova] Releasing lock "refresh_cache-cc85e976-78cf-4289-9674-d697630e7775" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.708060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1118.715849] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.718120] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.554s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.719270] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1118.720654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] 
Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.763s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1118.721096] env[69475]: DEBUG nova.objects.instance [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lazy-loading 'resources' on Instance uuid 8f18d683-7734-4798-8963-7336fe229f16 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1118.741336] env[69475]: INFO nova.scheduler.client.report [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted allocations for instance baf27027-678d-4167-bb9b-df410aeb0e82 [ 1118.743697] env[69475]: INFO nova.scheduler.client.report [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted allocations for instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 [ 1118.772878] env[69475]: DEBUG oslo_vmware.api [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Task: {'id': task-3508935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227207} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.773340] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1118.773789] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1118.774034] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1118.774311] env[69475]: INFO nova.compute.manager [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Took 1.09 seconds to destroy the instance on the hypervisor. [ 1118.774637] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1118.775214] env[69475]: DEBUG nova.compute.manager [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1118.775672] env[69475]: DEBUG nova.network.neutron [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1118.819575] env[69475]: DEBUG nova.network.neutron [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1118.828452] env[69475]: DEBUG oslo_vmware.api [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508932, 'name': PowerOnVM_Task, 'duration_secs': 0.762166} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.828944] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.829205] env[69475]: INFO nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1118.829595] env[69475]: DEBUG nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.830410] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa3999d-6182-49f9-8d9d-1e203205bbb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.850938] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508936, 'name': Rename_Task, 'duration_secs': 0.148427} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.854304] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1118.854304] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e11bbe58-b716-4603-b1ab-7006fff05719 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.859558] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1118.859558] env[69475]: value = "task-3508937" [ 1118.859558] env[69475]: _type = "Task" [ 1118.859558] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.873732] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508937, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.949537] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e5dffb-8755-5555-adee-e76c7a8575a6, 'name': SearchDatastore_Task, 'duration_secs': 0.011696} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.950430] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1bd86fc4-b217-488f-8a95-4ccc4a620afb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.956725] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1118.956725] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ab0d9-7b8c-beaf-17b5-8ff1365c9a8f" [ 1118.956725] env[69475]: _type = "Task" [ 1118.956725] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.966673] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ab0d9-7b8c-beaf-17b5-8ff1365c9a8f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.256214] env[69475]: DEBUG oslo_concurrency.lockutils [None req-993ade2e-2a5c-4224-81cc-935ac28d0e6c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.738s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.257212] env[69475]: DEBUG oslo_concurrency.lockutils [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] Acquired lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1119.257741] env[69475]: DEBUG oslo_concurrency.lockutils [None req-832c98a5-664a-4b14-9959-cca5f70e3adb tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "baf27027-678d-4167-bb9b-df410aeb0e82" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.483s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.260548] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6932faa-137a-42e4-87ea-84d86ada750c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.274243] env[69475]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
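
The "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" triples that recur throughout this log (lockutils.py:405/410/424) come from oslo.concurrency's named-lock wrapper, and the "Acquiring lock ... / Acquired lock ... / Releasing lock ..." lines (lockutils.py:313/316/334) come from its context-manager form. A minimal sketch of both patterns, using a stand-in function name rather than Nova's actual ResourceTracker code:

    # Illustrative sketch only: reproduces the lockutils logging pattern seen
    # in this log, not Nova's ResourceTracker implementation.
    from oslo_concurrency import lockutils

    @lockutils.synchronized("compute_resources")
    def update_usage():
        # Only one caller holding the "compute_resources" named lock runs
        # this body at a time; the decorator's wrapper emits the
        # "Acquiring lock ...", "acquired ... waited Ns" and
        # "released ... held Ns" DEBUG lines around the call.
        pass

    # The context-manager form seen around the image-cache entries above:
    with lockutils.lock("[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f"):
        pass
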
[ 1119.274243] env[69475]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=69475) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1119.274975] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-af3d0e2b-7cff-43b8-84f6-055f6c356499 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.289391] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3529358-f97b-4b32-ab35-257fdbb19f45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.341498] env[69475]: DEBUG nova.network.neutron [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1119.343662] env[69475]: ERROR root [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-701090' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 479, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-701090' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-701090' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-701090'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 
358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-701090' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-701090' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-701090'}\n"]: nova.exception.InstanceNotFound: Instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 could not be found. [ 1119.343867] env[69475]: DEBUG oslo_concurrency.lockutils [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] Releasing lock "1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.344098] env[69475]: DEBUG nova.compute.manager [req-2cb284f0-8f75-4536-8fa6-6101af945e2e req-5164ca01-332e-48c5-bdfc-cf24db09d189 service nova] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Detach interface failed, port_id=e60a34f2-9926-41dc-a777-3d0e92f22ce9, reason: Instance 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1119.358815] env[69475]: INFO nova.compute.manager [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Took 25.06 seconds to build instance. [ 1119.370664] env[69475]: DEBUG oslo_vmware.api [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508937, 'name': PowerOnVM_Task, 'duration_secs': 0.447228} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.373994] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1119.374756] env[69475]: INFO nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Took 6.51 seconds to spawn the instance on the hypervisor. 
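
The traceback above ends with the original oslo.vmware fault being dropped and reported as nova.exception.InstanceNotFound. A hedged sketch of that translation, using an illustrative helper name and the same property lookup (vim_util.get_object_property) that faulted for vm-701090; this is not Nova's exact code path:

    # Sketch of the fault translation shown in the traceback: a vSphere
    # ManagedObjectNotFound fault surfaces as ManagedObjectNotFoundException
    # and is re-raised in Nova's own terms as InstanceNotFound.
    from oslo_vmware import exceptions as vexc
    from oslo_vmware import vim_util

    from nova import exception

    def get_power_state_or_raise(session, vm_ref, instance_uuid):
        try:
            # This issues the RetrievePropertiesEx call that faulted above.
            return session.invoke_api(vim_util, "get_object_property",
                                      session.vim, vm_ref,
                                      "runtime.powerState")
        except vexc.ManagedObjectNotFoundException:
            # The backing VM (vm-701090 in the log) no longer exists on the
            # vCenter side; report it as a missing instance.
            raise exception.InstanceNotFound(instance_id=instance_uuid)
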
[ 1119.374756] env[69475]: DEBUG nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1119.375385] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad38a359-541b-4702-bdd4-a5b9ff2b518d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.473394] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525ab0d9-7b8c-beaf-17b5-8ff1365c9a8f, 'name': SearchDatastore_Task, 'duration_secs': 0.014473} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.473394] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.474445] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cc85e976-78cf-4289-9674-d697630e7775/cc85e976-78cf-4289-9674-d697630e7775.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1119.474445] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98712b8f-9594-44dc-aac4-a83b5ba6ed58 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.483506] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1119.483506] env[69475]: value = "task-3508938" [ 1119.483506] env[69475]: _type = "Task" [ 1119.483506] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.498332] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508938, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.534216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-657460e3-9cdb-4f84-9ef7-c9f1a50a1a64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.547731] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36b97cc1-d94d-48cc-a081-c207a633d4cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.587907] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68791907-5910-4e83-a988-54dea04e71c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.596398] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb29c0a0-7cef-440c-ae53-c909e051b804 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.612708] env[69475]: DEBUG nova.compute.provider_tree [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1119.845674] env[69475]: INFO nova.compute.manager [-] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Took 1.07 seconds to deallocate network for instance. [ 1119.862650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5908041f-8248-4fdf-95c8-789725873a23 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.579s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.895485] env[69475]: INFO nova.compute.manager [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Took 23.17 seconds to build instance. [ 1120.005599] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508938, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.115619] env[69475]: DEBUG nova.scheduler.client.report [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1120.357638] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.399286] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0224416b-68db-45fa-887e-9fb7e4305963 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.687s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.497572] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508938, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596109} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.497856] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cc85e976-78cf-4289-9674-d697630e7775/cc85e976-78cf-4289-9674-d697630e7775.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1120.498106] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1120.498363] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97dbbd9d-5c13-4197-965e-c0556c06ec3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.507816] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1120.507816] env[69475]: value = "task-3508939" [ 1120.507816] env[69475]: _type = "Task" [ 1120.507816] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.518440] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508939, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.623033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.900s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1120.625280] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 5.083s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1120.632370] env[69475]: DEBUG nova.objects.instance [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1120.660841] env[69475]: INFO nova.scheduler.client.report [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Deleted allocations for instance 8f18d683-7734-4798-8963-7336fe229f16 [ 1120.759407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1120.759407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.021030] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149859} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.021030] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.021454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74985f0-299a-4d2b-8c13-5791014f7892 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.051540] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] cc85e976-78cf-4289-9674-d697630e7775/cc85e976-78cf-4289-9674-d697630e7775.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.053784] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-090ba728-3f75-4a28-8d7a-b43d469ad249 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.078322] env[69475]: DEBUG nova.compute.manager [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1121.085986] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1121.085986] env[69475]: value = "task-3508940" [ 1121.085986] env[69475]: _type = "Task" [ 1121.085986] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.101028] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508940, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.172737] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e35c18a8-2c94-4c96-8041-681c90af412c tempest-ServersNegativeTestJSON-1911859080 tempest-ServersNegativeTestJSON-1911859080-project-member] Lock "8f18d683-7734-4798-8963-7336fe229f16" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.075s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.264222] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1121.547798] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.548129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.554850] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.554850] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.600699] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508940, 'name': ReconfigVM_Task, 'duration_secs': 0.327028} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.600958] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Reconfigured VM instance instance-00000070 to attach disk [datastore1] cc85e976-78cf-4289-9674-d697630e7775/cc85e976-78cf-4289-9674-d697630e7775.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.602487] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bc7b58e4-3ea3-49ef-8144-9ad0d33df0aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.606083] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.606799] env[69475]: INFO nova.compute.manager [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Rebuilding instance [ 1121.610479] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1121.610479] env[69475]: value = "task-3508941" [ 1121.610479] env[69475]: _type = "Task" [ 1121.610479] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.620227] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508941, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.644692] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a18060ad-906d-4537-b06e-47658a424d42 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.018s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1121.648977] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 2.938s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.648977] env[69475]: DEBUG nova.objects.instance [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1121.669017] env[69475]: DEBUG nova.compute.manager [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.672021] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96c55518-ff90-458e-a5d5-fa573eb2fb82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.785446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.054936] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.057795] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1122.124347] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508941, 'name': Rename_Task, 'duration_secs': 0.159878} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.124630] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1122.124909] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e97f505e-9fde-48c2-91b2-7eb750ced20d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.134222] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1122.134222] env[69475]: value = "task-3508942" [ 1122.134222] env[69475]: _type = "Task" [ 1122.134222] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.145352] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508942, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.602341] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.622576] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1122.647935] env[69475]: DEBUG oslo_vmware.api [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508942, 'name': PowerOnVM_Task, 'duration_secs': 0.500551} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1122.648365] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1122.648586] env[69475]: INFO nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Took 7.49 seconds to spawn the instance on the hypervisor. 
[ 1122.648795] env[69475]: DEBUG nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1122.651158] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-706b448b-81f8-4215-9ab7-6ebb0b8b63d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.656891] env[69475]: DEBUG oslo_concurrency.lockutils [None req-412d80fe-6065-4ec5-a6eb-f10bb2ea940e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.657063] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.300s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1122.657464] env[69475]: DEBUG nova.objects.instance [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lazy-loading 'resources' on Instance uuid 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.689691] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1122.690570] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-00670a92-bfa9-4bb7-bd7a-41da0afc40b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1122.699464] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1122.699464] env[69475]: value = "task-3508943" [ 1122.699464] env[69475]: _type = "Task" [ 1122.699464] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1122.711022] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508943, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.179163] env[69475]: INFO nova.compute.manager [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Took 20.64 seconds to build instance. 
[ 1123.180186] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.180409] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.212435] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508943, 'name': PowerOffVM_Task, 'duration_secs': 0.138307} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.212731] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1123.212973] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1123.213831] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-129ac51d-2049-474a-bdd3-bc95b9052567 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.223238] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1123.223482] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6b6be761-3d64-4057-b5fa-101a9fcd59c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.252589] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1123.253957] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Deleting contents of the VM from datastore 
datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1123.253957] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Deleting the datastore file [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1123.253957] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93a28cb7-3832-402a-b748-43345555af42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.262274] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1123.262274] env[69475]: value = "task-3508945" [ 1123.262274] env[69475]: _type = "Task" [ 1123.262274] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.270428] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508945, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.421764] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c33942ab-c58f-4080-93af-26a873328864 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.431224] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c46ae1-e8d1-449a-bed5-aa3569e7eb93 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.470381] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a460bf-94d8-4379-b780-ddac9629cfc8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.478274] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b14ebd9-51a7-4d49-bbfb-ed6bbe63dfa1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.492075] env[69475]: DEBUG nova.compute.provider_tree [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1123.682469] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1832f39e-5e9d-4793-8368-b291f504ab74 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.158s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.682851] env[69475]: DEBUG nova.compute.manager [None 
req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1123.771841] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508945, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395129} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.772109] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1123.772294] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1123.772470] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1123.995856] env[69475]: DEBUG nova.scheduler.client.report [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1124.212891] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.500598] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.843s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.503732] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.897s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.517927] env[69475]: INFO nova.scheduler.client.report [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Deleted allocations for instance 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c [ 1124.661050] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "cc85e976-78cf-4289-9674-d697630e7775" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.661300] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.661512] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "cc85e976-78cf-4289-9674-d697630e7775-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1124.661695] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1124.661908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.664074] env[69475]: INFO nova.compute.manager [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Terminating instance [ 1124.807344] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1124.807592] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1124.807751] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1124.807931] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1124.808280] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1124.808471] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1124.808686] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1124.808852] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1124.809040] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1124.809217] env[69475]: DEBUG nova.virt.hardware [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1124.809393] env[69475]: DEBUG nova.virt.hardware [None 
req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1124.810281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685f213d-cc34-49b6-9d50-5ccdb185c704 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.818556] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf75275c-8e41-40e9-b869-68f9a2573c03 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.833966] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1124.839854] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.839983] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1124.840216] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1e66664d-554b-4592-b5e6-6d99169005b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.857391] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1124.857391] env[69475]: value = "task-3508946" [ 1124.857391] env[69475]: _type = "Task" [ 1124.857391] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.867234] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508946, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.009143] env[69475]: INFO nova.compute.claims [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1125.025989] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1cfdb204-60fc-4eda-9d88-962234cfba10 tempest-ServerShowV254Test-491662995 tempest-ServerShowV254Test-491662995-project-member] Lock "74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.959s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.168321] env[69475]: DEBUG nova.compute.manager [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1125.168576] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1125.169550] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9748c6ee-dfa0-4d6b-aded-85bd5bda7a3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.177272] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1125.177543] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33bf8411-976f-4e31-9207-e79ab44157d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.184236] env[69475]: DEBUG oslo_vmware.api [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1125.184236] env[69475]: value = "task-3508947" [ 1125.184236] env[69475]: _type = "Task" [ 1125.184236] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.192382] env[69475]: DEBUG oslo_vmware.api [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508947, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.368967] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508946, 'name': CreateVM_Task, 'duration_secs': 0.244356} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.369205] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1125.369615] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.369775] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.370193] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1125.370450] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e23fe002-0285-4fb6-876c-dce843f9b331 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.374842] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1125.374842] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c1c17e-6faa-cfe9-11d2-703bd6f1c4c4" [ 1125.374842] env[69475]: _type = "Task" [ 1125.374842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.382605] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c1c17e-6faa-cfe9-11d2-703bd6f1c4c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.515987] env[69475]: INFO nova.compute.resource_tracker [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating resource usage from migration d83e0b92-e5cf-482f-9e30-acc92b1aae0c [ 1125.697309] env[69475]: DEBUG oslo_vmware.api [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508947, 'name': PowerOffVM_Task, 'duration_secs': 0.162539} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.697579] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1125.697745] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1125.697997] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dde8a279-3922-4589-af0c-2a5203296db7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.736095] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4091752f-c755-4d70-bdf4-518f30cd2cba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.744190] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c5b3eb-19ff-4ebb-94d5-126fce9e9832 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.778738] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22ad397-292c-41d1-b9ac-efec74d7c11c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.781403] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1125.781601] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1125.781778] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore1] cc85e976-78cf-4289-9674-d697630e7775 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1125.782032] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ff2d492-0d23-4eef-8f6d-96b929184344 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.789917] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2ca7b12-682f-475a-9510-254ce19d808c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.793998] env[69475]: DEBUG oslo_vmware.api [None 
req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1125.793998] env[69475]: value = "task-3508949" [ 1125.793998] env[69475]: _type = "Task" [ 1125.793998] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.805354] env[69475]: DEBUG nova.compute.provider_tree [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1125.814181] env[69475]: DEBUG oslo_vmware.api [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508949, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.885959] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52c1c17e-6faa-cfe9-11d2-703bd6f1c4c4, 'name': SearchDatastore_Task, 'duration_secs': 0.011379} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.886294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1125.886535] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1125.886773] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1125.886920] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1125.887112] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1125.887374] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a8931024-5c0a-4f9f-a111-81ba65e07cbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.905296] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1125.905296] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1125.906031] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc042a30-997d-4792-b8c2-ce3131508064 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.911326] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1125.911326] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52751d24-f1ee-86f6-5209-8b35b9d9b83d" [ 1125.911326] env[69475]: _type = "Task" [ 1125.911326] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.919253] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52751d24-f1ee-86f6-5209-8b35b9d9b83d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.304407] env[69475]: DEBUG oslo_vmware.api [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508949, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.285248} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.304643] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1126.304870] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1126.305131] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1126.305306] env[69475]: INFO nova.compute.manager [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: cc85e976-78cf-4289-9674-d697630e7775] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1126.305542] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1126.305733] env[69475]: DEBUG nova.compute.manager [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1126.305869] env[69475]: DEBUG nova.network.neutron [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1126.308135] env[69475]: DEBUG nova.scheduler.client.report [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1126.423070] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52751d24-f1ee-86f6-5209-8b35b9d9b83d, 'name': SearchDatastore_Task, 'duration_secs': 0.008893} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.423902] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fba934be-a268-49e0-bc9d-b53c16eb18b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.429450] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1126.429450] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e28dc-0f2c-6168-58f0-dd5beb561323" [ 1126.429450] env[69475]: _type = "Task" [ 1126.429450] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.438115] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e28dc-0f2c-6168-58f0-dd5beb561323, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.673309] env[69475]: DEBUG nova.compute.manager [req-a08800f3-f731-453f-a86f-7d2e2671a4d1 req-4168e0df-972e-486f-a40f-95ccc078ab94 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Received event network-vif-deleted-30580d6c-38c0-4196-9332-ebfc62061eaa {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1126.673309] env[69475]: INFO nova.compute.manager [req-a08800f3-f731-453f-a86f-7d2e2671a4d1 req-4168e0df-972e-486f-a40f-95ccc078ab94 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Neutron deleted interface 30580d6c-38c0-4196-9332-ebfc62061eaa; detaching it from the instance and deleting it from the info cache [ 1126.673309] env[69475]: DEBUG nova.network.neutron [req-a08800f3-f731-453f-a86f-7d2e2671a4d1 req-4168e0df-972e-486f-a40f-95ccc078ab94 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.813247] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.310s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.813399] env[69475]: INFO nova.compute.manager [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Migrating [ 1126.819614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.034s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.821115] env[69475]: INFO nova.compute.claims [None 
req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1126.941160] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527e28dc-0f2c-6168-58f0-dd5beb561323, 'name': SearchDatastore_Task, 'duration_secs': 0.012455} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.942650] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1126.943079] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1126.943383] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-da14260a-a722-4b83-9410-94a1f50c785f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.950438] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1126.950438] env[69475]: value = "task-3508950" [ 1126.950438] env[69475]: _type = "Task" [ 1126.950438] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.957749] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508950, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.155167] env[69475]: DEBUG nova.network.neutron [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1127.175353] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e82a5fc0-7089-4cc2-8376-3789ebc59b31 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.186108] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e025bb0f-dc59-4d03-82b1-0ab455cba9b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.219112] env[69475]: DEBUG nova.compute.manager [req-a08800f3-f731-453f-a86f-7d2e2671a4d1 req-4168e0df-972e-486f-a40f-95ccc078ab94 service nova] [instance: cc85e976-78cf-4289-9674-d697630e7775] Detach interface failed, port_id=30580d6c-38c0-4196-9332-ebfc62061eaa, reason: Instance cc85e976-78cf-4289-9674-d697630e7775 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1127.332960] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.333207] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.333333] env[69475]: DEBUG nova.network.neutron [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.462099] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508950, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.658591] env[69475]: INFO nova.compute.manager [-] [instance: cc85e976-78cf-4289-9674-d697630e7775] Took 1.35 seconds to deallocate network for instance. [ 1127.963779] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508950, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638722} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.966227] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1127.966448] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1127.967160] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-99ca33ad-de4d-43aa-b75f-9bc95c147dff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.973833] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1127.973833] env[69475]: value = "task-3508951" [ 1127.973833] env[69475]: _type = "Task" [ 1127.973833] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.983313] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508951, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.060173] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7988b1f-88ef-464f-912e-eafbfc15b869 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.067281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bce1a7-194e-4478-872a-1a7d81395fd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.098282] env[69475]: DEBUG nova.network.neutron [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.099898] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc98571-4bad-465a-bed8-b5e93134177c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.107116] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10aa67fd-9ab5-406a-90ab-cc5eb2c0aa24 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.120573] env[69475]: DEBUG nova.compute.provider_tree [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.165194] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1128.170660] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1128.170877] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.171065] env[69475]: INFO nova.compute.manager [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Shelving [ 1128.483610] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508951, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063175} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.483901] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1128.484636] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d63450da-33c2-4ad1-ba9d-98684a153b9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.503817] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1128.504081] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56ca1d07-bd4b-4f77-98cf-bd31f3cecf42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1128.522205] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1128.522205] env[69475]: value = "task-3508952" [ 1128.522205] env[69475]: _type = "Task" [ 1128.522205] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1128.531177] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508952, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1128.604077] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.623291] env[69475]: DEBUG nova.scheduler.client.report [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1129.031779] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508952, 'name': ReconfigVM_Task, 'duration_secs': 0.280464} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.032115] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Reconfigured VM instance instance-0000006f to attach disk [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb/55d3513b-e0ad-49a7-bd26-147b1b2632cb.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1129.032924] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8607acc3-21a9-4ea0-b5ae-70a6b6cb932a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.038869] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1129.038869] env[69475]: value = "task-3508953" [ 1129.038869] env[69475]: _type = "Task" [ 1129.038869] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.046434] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508953, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.128668] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.309s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.129244] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1129.132599] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.531s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.135793] env[69475]: INFO nova.compute.claims [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1129.183366] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1129.183680] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a05b6d5-39b0-48d2-a6b3-c119c585e454 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.190645] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1129.190645] env[69475]: value = "task-3508954" [ 1129.190645] env[69475]: _type = "Task" [ 1129.190645] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.198792] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508954, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.548842] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508953, 'name': Rename_Task, 'duration_secs': 0.124922} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.549159] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1129.549373] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3dc53cfc-325f-4e0e-9822-0aeb0c7a0cd9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.556350] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1129.556350] env[69475]: value = "task-3508955" [ 1129.556350] env[69475]: _type = "Task" [ 1129.556350] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.563791] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508955, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.638590] env[69475]: DEBUG nova.compute.utils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1129.642113] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1129.642281] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1129.696997] env[69475]: DEBUG nova.policy [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11c9c75b1984423f860daec9827e7ce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67d27343d8c04fc9a2bed7a764f6cf82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1129.702149] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508954, 'name': PowerOffVM_Task, 'duration_secs': 0.193293} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.702433] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1129.703253] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034d9984-297b-475d-9a36-329ff9711646 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.722917] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5102bf84-3eae-4378-bc05-0370ff6e5bcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.039951] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Successfully created port: f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1130.066199] env[69475]: DEBUG oslo_vmware.api [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508955, 'name': PowerOnVM_Task, 'duration_secs': 0.419846} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.066479] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1130.066688] env[69475]: DEBUG nova.compute.manager [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1130.067458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd008eb-7801-47a4-a143-cbb2f1dbdde1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.117952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8c3de41-7fa0-4870-a722-50dee308cc27 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.136045] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1130.144071] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1130.238036] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1130.238036] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e721e676-7fd2-4b21-8f2a-5d7b3624cbab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.244106] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1130.244106] env[69475]: value = "task-3508956" [ 1130.244106] env[69475]: _type = "Task" [ 1130.244106] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.257295] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508956, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.402976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bf76b5a-32c0-4d31-a51f-b0298c0b7e72 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.412547] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf602200-1381-4cb4-9faf-403be73e9d8b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.444236] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d82ba6-fe8c-4179-8f1d-addb3710e3bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.451388] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d279d9-8709-4067-a86c-5ddd2b5e1e1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.464256] env[69475]: DEBUG nova.compute.provider_tree [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1130.582639] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.644096] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1130.644096] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-13697dfb-296a-4e04-a4b4-296a9e117113 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.655789] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1130.655789] env[69475]: value = "task-3508957" [ 1130.655789] env[69475]: _type = "Task" [ 1130.655789] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.666133] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508957, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.753812] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508956, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.967898] env[69475]: DEBUG nova.scheduler.client.report [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1131.158654] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1131.169938] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508957, 'name': PowerOffVM_Task, 'duration_secs': 0.201013} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.170287] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1131.170734] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1131.185645] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1131.185890] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1131.186070] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1131.186253] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1131.186401] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1131.186547] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:444}} [ 1131.186752] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1131.186910] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1131.187170] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1131.187356] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1131.187532] env[69475]: DEBUG nova.virt.hardware [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1131.188333] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f078f242-52ee-4e43-b8ea-ab72280a7531 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.195858] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb0acaf-54e4-41c5-89c9-44f11f4bbd5f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.254195] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508956, 'name': CreateSnapshot_Task, 'duration_secs': 0.626509} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.254573] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1131.255347] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848945da-c2e8-44e9-85ac-a7195c877ce2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.473369] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.341s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.473889] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1131.477010] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.857s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.478425] env[69475]: INFO nova.compute.claims [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1131.521633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.521884] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.522134] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.522321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.522487] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.525902] env[69475]: DEBUG nova.compute.manager [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Received event network-vif-plugged-f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1131.526096] env[69475]: DEBUG oslo_concurrency.lockutils [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1131.526442] env[69475]: DEBUG oslo_concurrency.lockutils [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1131.526442] env[69475]: DEBUG oslo_concurrency.lockutils [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.526666] env[69475]: DEBUG nova.compute.manager [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] No waiting events found dispatching network-vif-plugged-f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1131.526739] env[69475]: WARNING nova.compute.manager [req-5ab9fa65-5185-4ff2-b226-85ff6d617b46 req-4fac7042-646b-40cd-8721-1a305fd0823c service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Received unexpected event network-vif-plugged-f181f990-1cef-4b68-ae07-ea93c380f5a0 for instance with vm_state building and task_state spawning. 
[ 1131.527250] env[69475]: INFO nova.compute.manager [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Terminating instance [ 1131.618416] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Successfully updated port: f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1131.676970] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1131.677238] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1131.677385] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1131.677565] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1131.677711] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1131.677857] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1131.678072] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:583}} [ 1131.678233] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1131.678397] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1131.678557] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1131.678731] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1131.683828] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3366f0c7-ff2a-46cb-a3b6-4f3488cf297f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.700413] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1131.700413] env[69475]: value = "task-3508958" [ 1131.700413] env[69475]: _type = "Task" [ 1131.700413] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.708388] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508958, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.775266] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1131.775734] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-721a4a9a-e70e-4ce5-bef5-b238392a7c9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.784098] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1131.784098] env[69475]: value = "task-3508959" [ 1131.784098] env[69475]: _type = "Task" [ 1131.784098] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1131.793499] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508959, 'name': CloneVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.982924] env[69475]: DEBUG nova.compute.utils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1131.987060] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1131.987060] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1132.027123] env[69475]: DEBUG nova.policy [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb93c2f0a3554be8b25cde370a4083ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de2b24bdabce45a7884bdce4ed781c79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1132.033378] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "refresh_cache-55d3513b-e0ad-49a7-bd26-147b1b2632cb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.033550] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquired lock "refresh_cache-55d3513b-e0ad-49a7-bd26-147b1b2632cb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.033726] env[69475]: DEBUG nova.network.neutron [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.121058] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 
tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.121058] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.121324] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1132.211333] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508958, 'name': ReconfigVM_Task, 'duration_secs': 0.180443} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1132.211636] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1132.294211] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508959, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.366935] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Successfully created port: 5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1132.487576] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1132.553416] env[69475]: DEBUG nova.network.neutron [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.611874] env[69475]: DEBUG nova.network.neutron [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.660070] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1132.721243] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1132.721457] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1132.721618] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1132.721797] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1132.721976] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1132.722171] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1132.722502] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] 
Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1132.722552] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1132.722759] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1132.723066] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1132.723375] env[69475]: DEBUG nova.virt.hardware [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1132.729773] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1132.732816] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-938d8f4b-d737-4395-b9b5-65ead41e42e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.754886] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.755507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.759243] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1132.759243] env[69475]: value = "task-3508960" [ 1132.759243] env[69475]: _type = "Task" [ 1132.759243] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1132.772439] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508960, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.792123] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44bfe210-47e5-4300-a106-55d41c602eab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.802572] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508959, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1132.803554] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7981193-7415-49ad-b182-4514109155d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.834203] env[69475]: DEBUG nova.network.neutron [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating instance_info_cache with network_info: [{"id": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "address": "fa:16:3e:ce:4f:49", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf181f990-1c", "ovs_interfaceid": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.836447] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e930e77-74a0-45c7-987f-5edc4d98113c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.844254] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fda97c-9389-4d21-aa32-239797e5d777 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1132.859339] env[69475]: DEBUG 
nova.compute.provider_tree [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.115049] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Releasing lock "refresh_cache-55d3513b-e0ad-49a7-bd26-147b1b2632cb" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.115499] env[69475]: DEBUG nova.compute.manager [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1133.115702] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1133.116822] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0934b13e-da8c-4b3a-a68c-600e8d06eff9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.125141] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.125416] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fdf6fea-2c96-4ad1-afa9-ab2e1821300c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.131013] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1133.131013] env[69475]: value = "task-3508961" [ 1133.131013] env[69475]: _type = "Task" [ 1133.131013] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.139047] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.260062] env[69475]: DEBUG nova.compute.utils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1133.271740] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508960, 'name': ReconfigVM_Task, 'duration_secs': 0.268448} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.272640] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1133.273506] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be9cf95-39b8-4cf6-bfc0-46b050cd4378 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.297078] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfiguring VM instance instance-0000006e to attach disk [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1133.300450] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-25de0b27-0972-42f8-acb0-bd1b375f6fbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.318824] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3508959, 'name': CloneVM_Task, 'duration_secs': 1.369668} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.320046] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Created linked-clone VM from snapshot [ 1133.320368] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1133.320368] env[69475]: value = "task-3508962" [ 1133.320368] env[69475]: _type = "Task" [ 1133.320368] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.321055] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cc1650-2471-4283-a933-270458985318 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.334021] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Uploading image 6b1310bb-4147-4b4b-9e96-dde2c9000c1d {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1133.335931] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508962, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.336569] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.336852] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Instance network_info: |[{"id": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "address": "fa:16:3e:ce:4f:49", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf181f990-1c", "ovs_interfaceid": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1133.337304] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ce:4f:49', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'f181f990-1cef-4b68-ae07-ea93c380f5a0', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1133.344857] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1133.345079] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1133.345306] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e676fb90-2c83-4c66-a5ea-e03f4ca50e80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.362797] env[69475]: DEBUG nova.scheduler.client.report [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1133.372314] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1133.372314] env[69475]: value = "vm-701131" [ 1133.372314] env[69475]: _type = "VirtualMachine" [ 1133.372314] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1133.372590] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-29b9d597-6647-49cc-8446-297a1ee38509 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.375344] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1133.375344] env[69475]: value = "task-3508963" [ 1133.375344] env[69475]: _type = "Task" [ 1133.375344] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.384483] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508963, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.385728] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease: (returnval){ [ 1133.385728] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243c514-46e6-4234-c291-9683660dc69b" [ 1133.385728] env[69475]: _type = "HttpNfcLease" [ 1133.385728] env[69475]: } obtained for exporting VM: (result){ [ 1133.385728] env[69475]: value = "vm-701131" [ 1133.385728] env[69475]: _type = "VirtualMachine" [ 1133.385728] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1133.386229] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the lease: (returnval){ [ 1133.386229] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243c514-46e6-4234-c291-9683660dc69b" [ 1133.386229] env[69475]: _type = "HttpNfcLease" [ 1133.386229] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1133.392824] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1133.392824] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243c514-46e6-4234-c291-9683660dc69b" [ 1133.392824] env[69475]: _type = "HttpNfcLease" [ 1133.392824] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1133.503156] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1133.527751] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1133.528245] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1133.528505] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1133.528755] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1133.528997] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1133.529276] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1133.529587] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1133.529819] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1133.530080] env[69475]: DEBUG nova.virt.hardware [None 
req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1133.530346] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1133.530604] env[69475]: DEBUG nova.virt.hardware [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1133.531981] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c893fc-e5b2-4e02-9b79-27bba75230c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.540960] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9573f8cb-347d-4644-ad0b-6c08214c49ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.641307] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508961, 'name': PowerOffVM_Task, 'duration_secs': 0.125771} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.642067] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1133.642286] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1133.642616] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b003f03-eb7e-4425-8390-377bcc850cd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.679051] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1133.679425] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1133.679467] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Deleting the datastore file [datastore2] 55d3513b-e0ad-49a7-bd26-147b1b2632cb {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1133.679734] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-313be046-4c31-4794-aeb4-9af06b7f7b26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.687564] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for the task: (returnval){ [ 1133.687564] env[69475]: value = "task-3508966" [ 1133.687564] env[69475]: _type = "Task" [ 1133.687564] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.695920] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.737647] env[69475]: DEBUG nova.compute.manager [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Received event network-changed-f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1133.737848] env[69475]: DEBUG nova.compute.manager [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Refreshing instance network info cache due to event network-changed-f181f990-1cef-4b68-ae07-ea93c380f5a0. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1133.738189] env[69475]: DEBUG oslo_concurrency.lockutils [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] Acquiring lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.738264] env[69475]: DEBUG oslo_concurrency.lockutils [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] Acquired lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.738398] env[69475]: DEBUG nova.network.neutron [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Refreshing network info cache for port f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1133.763058] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.833211] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508962, 'name': ReconfigVM_Task, 'duration_secs': 0.405706} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.833508] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Reconfigured VM instance instance-0000006e to attach disk [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265/1459221f-4c35-4a49-a8c0-f8b4ee3e2265.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1133.833764] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1133.868168] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.391s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.868691] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1133.871437] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.659s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.872902] env[69475]: INFO nova.compute.claims [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1133.885309] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508963, 'name': CreateVM_Task, 'duration_secs': 0.352553} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1133.885548] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1133.886157] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1133.886327] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1133.886635] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1133.887156] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3e0eced4-cf4c-42bf-af6a-be46ab511cfb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.894791] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1133.894791] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d8edd0-7e79-60f7-3cba-7abdebdb266a" [ 1133.894791] env[69475]: _type = "Task" [ 1133.894791] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.896426] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1133.896426] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243c514-46e6-4234-c291-9683660dc69b" [ 1133.896426] env[69475]: _type = "HttpNfcLease" [ 1133.896426] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1133.899509] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1133.899509] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5243c514-46e6-4234-c291-9683660dc69b" [ 1133.899509] env[69475]: _type = "HttpNfcLease" [ 1133.899509] env[69475]: }. 
{{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1133.900118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7390a7-c2d8-476d-ad63-62023cdef321 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.907022] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d8edd0-7e79-60f7-3cba-7abdebdb266a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.910637] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1133.910805] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1134.014950] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Successfully updated port: 5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1134.149150] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-58d7fe18-8d99-49cc-bad8-64fff5b31704 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.196284] env[69475]: DEBUG oslo_vmware.api [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Task: {'id': task-3508966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293173} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.196524] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.196705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.196878] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.197063] env[69475]: INFO nova.compute.manager [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1134.197311] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1134.197488] env[69475]: DEBUG nova.compute.manager [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1134.197580] env[69475]: DEBUG nova.network.neutron [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1134.213125] env[69475]: DEBUG nova.network.neutron [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1134.340403] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0389c61a-f077-4959-9a49-2f747d5f9734 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.364050] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-545ce8ea-b79d-48ef-8541-8cce1cde6e26 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.382845] env[69475]: DEBUG nova.compute.utils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1134.385844] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1134.389832] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1134.391411] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1134.407561] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d8edd0-7e79-60f7-3cba-7abdebdb266a, 'name': SearchDatastore_Task, 'duration_secs': 0.014384} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.408159] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.408159] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1134.408159] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.409089] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.409387] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1134.410278] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-70cf2eb3-dcb6-4635-8800-4635d93551e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.419265] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1134.419973] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1134.420682] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17938ac6-1735-49bf-8be5-f11bb6062457 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.426583] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1134.426583] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ac9ba-2090-bc10-b991-33b027251733" [ 1134.426583] env[69475]: _type = "Task" [ 1134.426583] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.439660] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ac9ba-2090-bc10-b991-33b027251733, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.441242] env[69475]: DEBUG nova.policy [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1134.465535] env[69475]: DEBUG nova.network.neutron [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updated VIF entry in instance network info cache for port f181f990-1cef-4b68-ae07-ea93c380f5a0. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1134.465903] env[69475]: DEBUG nova.network.neutron [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating instance_info_cache with network_info: [{"id": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "address": "fa:16:3e:ce:4f:49", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf181f990-1c", "ovs_interfaceid": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.520446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.520446] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.520446] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1134.715891] env[69475]: DEBUG nova.network.neutron [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1134.759988] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Successfully created port: f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1134.827928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 
tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1134.828230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1134.828580] env[69475]: INFO nova.compute.manager [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Attaching volume be3145de-1a5b-4dc5-bbd7-5173190bff83 to /dev/sdb [ 1134.868408] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247975d2-bc1d-4181-9e54-5d47e500cc1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.875301] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6c6b84-b0ff-48f1-8159-dbd4d64e6a05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.889294] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1134.892418] env[69475]: DEBUG nova.virt.block_device [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating existing volume attachment record: 9c4f08c5-6ae9-4551-b7d8-7f762f01a2f2 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1134.944393] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521ac9ba-2090-bc10-b991-33b027251733, 'name': SearchDatastore_Task, 'duration_secs': 0.011422} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.950837] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12d513af-5a62-471a-b2a4-771d27740827 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.961179] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1134.961179] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fcf031-09a5-816d-12a8-65b20f6116d9" [ 1134.961179] env[69475]: _type = "Task" [ 1134.961179] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.972551] env[69475]: DEBUG nova.network.neutron [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Port 75df31f7-58d6-423b-80c5-e46458f30a93 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1134.974543] env[69475]: DEBUG oslo_concurrency.lockutils [req-952f8656-1c00-455c-80af-cb2a917e5762 req-012ec045-4acf-4bae-93d2-71ecf06165c7 service nova] Releasing lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.980536] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52fcf031-09a5-816d-12a8-65b20f6116d9, 'name': SearchDatastore_Task, 'duration_secs': 0.014508} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.980868] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1134.981190] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8/6960992f-a4dd-4a5d-abb8-ff7ae8a414b8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1134.982901] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b900f33-1bef-4d9c-a0e4-bbd952e94f49 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.988100] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1134.988100] env[69475]: value = "task-3508967" [ 1134.988100] env[69475]: _type = "Task" [ 1134.988100] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.000297] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.054550] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1135.218896] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4780107f-35a9-400b-b768-1491decdca91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.218896] env[69475]: INFO nova.compute.manager [-] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Took 1.02 seconds to deallocate network for instance. 
[ 1135.227914] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54213746-c5ac-4b79-809a-346a5be98068 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.272824] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fc4db0-9bc9-4236-b3cd-78f896ff0fed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.281725] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-378fd5ad-cecf-46b5-8da7-cb93f918a617 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.296590] env[69475]: DEBUG nova.compute.provider_tree [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1135.327263] env[69475]: DEBUG nova.network.neutron [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.501629] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508967, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.728029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.765233] env[69475]: DEBUG nova.compute.manager [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Received event network-vif-plugged-5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1135.765435] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.765758] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.766065] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.766317] env[69475]: DEBUG nova.compute.manager [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] No waiting events found dispatching network-vif-plugged-5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1135.766484] env[69475]: WARNING nova.compute.manager [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Received unexpected event network-vif-plugged-5b51cc5d-6e38-423f-8f69-13541ea8a317 for instance with vm_state building and task_state spawning. [ 1135.766738] env[69475]: DEBUG nova.compute.manager [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Received event network-changed-5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1135.766928] env[69475]: DEBUG nova.compute.manager [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Refreshing instance network info cache due to event network-changed-5b51cc5d-6e38-423f-8f69-13541ea8a317. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1135.767287] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1135.799910] env[69475]: DEBUG nova.scheduler.client.report [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1135.830220] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1135.830519] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Instance network_info: |[{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1135.830985] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1135.831184] env[69475]: DEBUG nova.network.neutron [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service 
nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Refreshing network info cache for port 5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1135.832348] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:15:12', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b51cc5d-6e38-423f-8f69-13541ea8a317', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1135.840156] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1135.841453] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1135.841684] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-358b5fe4-80d2-446c-9595-325db319e527 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.862279] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1135.862279] env[69475]: value = "task-3508971" [ 1135.862279] env[69475]: _type = "Task" [ 1135.862279] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.870457] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508971, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.901767] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1135.931623] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1135.932451] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1135.932451] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1135.932975] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1135.934159] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1135.934159] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1135.934159] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1135.934159] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1135.934668] 
env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1135.935189] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1135.935739] env[69475]: DEBUG nova.virt.hardware [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1135.938044] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597d9326-c8f1-4e28-9059-b266aab310ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.947790] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216d5f82-561e-4ed6-835b-3a84904fbc1e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.000828] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.001129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.001318] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.008138] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.627568} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.008461] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8/6960992f-a4dd-4a5d-abb8-ff7ae8a414b8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1136.008710] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1136.008967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8568db36-3886-4025-bef0-8672b60be668 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.016212] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1136.016212] env[69475]: value = "task-3508972" [ 1136.016212] env[69475]: _type = "Task" [ 1136.016212] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.026455] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508972, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.182294] env[69475]: DEBUG nova.compute.manager [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-vif-plugged-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1136.182294] env[69475]: DEBUG oslo_concurrency.lockutils [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.183549] env[69475]: DEBUG oslo_concurrency.lockutils [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.004s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.183835] env[69475]: DEBUG oslo_concurrency.lockutils [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.184139] env[69475]: DEBUG nova.compute.manager [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] No waiting events found dispatching network-vif-plugged-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1136.184451] env[69475]: WARNING nova.compute.manager [req-f83a3cca-3195-4273-90d6-40c8f329f17d req-4c326bf5-3503-46ae-a9a6-4edc4be534fa service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received unexpected event network-vif-plugged-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 for instance with vm_state building and task_state spawning. [ 1136.266287] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Successfully updated port: f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1136.306838] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.435s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1136.307275] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1136.310485] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.145s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.310713] env[69475]: DEBUG nova.objects.instance [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid cc85e976-78cf-4289-9674-d697630e7775 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.375370] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508971, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.525966] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508972, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075434} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.526326] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1136.527230] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abd01ba-6eab-450b-b930-5373558ee21c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.550191] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8/6960992f-a4dd-4a5d-abb8-ff7ae8a414b8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1136.551233] env[69475]: DEBUG nova.network.neutron [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updated VIF entry in instance network info cache for port 5b51cc5d-6e38-423f-8f69-13541ea8a317. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1136.551603] env[69475]: DEBUG nova.network.neutron [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1136.552858] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce78c30d-01cc-4360-94e9-07625e7899f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.575536] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1136.575536] env[69475]: value = "task-3508973" [ 1136.575536] env[69475]: _type = "Task" [ 1136.575536] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.584419] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508973, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.769770] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.770172] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.770172] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1136.815070] env[69475]: DEBUG nova.compute.utils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1136.820647] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1136.821038] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1136.871768] env[69475]: DEBUG nova.policy [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b42f0b943ec4de7ac656612ca56a34b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9101c50cbfe74c99b1e1a528cb5b5994', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1136.879531] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508971, 'name': CreateVM_Task, 'duration_secs': 0.592444} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1136.879809] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1136.880364] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1136.880565] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1136.880830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1136.881103] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7249123a-23ad-4531-b6cb-ef4b6316c755 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1136.887920] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1136.887920] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a1e1b-3ad9-38eb-8c09-608820519201" [ 1136.887920] env[69475]: _type = "Task" [ 1136.887920] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1136.895798] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a1e1b-3ad9-38eb-8c09-608820519201, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.048799] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.048903] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.049104] env[69475]: DEBUG nova.network.neutron [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1137.057069] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a525540-d885-4349-aa09-aa1d588053c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.065756] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebbdf161-090b-4006-892d-b7ad194f1a62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.069939] env[69475]: DEBUG oslo_concurrency.lockutils [req-a0886508-bac0-467e-9c2f-b51dd53d48f1 req-d51a2f43-32be-42b4-9a2f-efbfe9bd99c4 service nova] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.107103] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2843954c-11fc-4691-8b7c-fbae215ba58c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.114837] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508973, 'name': ReconfigVM_Task, 'duration_secs': 0.282715} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.120916] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8/6960992f-a4dd-4a5d-abb8-ff7ae8a414b8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1137.121063] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1fc4975e-acab-4da2-ac74-d895b8157261 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.123677] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a760e0-4bfc-4f27-bf3f-40b5751e4ba5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.139285] env[69475]: DEBUG nova.compute.provider_tree [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1137.141941] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1137.141941] env[69475]: value = "task-3508974" [ 1137.141941] env[69475]: _type = "Task" [ 1137.141941] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.152465] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508974, 'name': Rename_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.177227] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Successfully created port: face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1137.313921] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1137.321933] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1137.398416] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a1e1b-3ad9-38eb-8c09-608820519201, 'name': SearchDatastore_Task, 'duration_secs': 0.012155} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.398727] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1137.399057] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1137.399299] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1137.399477] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1137.399660] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1137.399949] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7e574219-07d3-4a0e-a839-d2a8254e7c23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.409737] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1137.409941] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1137.410943] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26320613-fab8-4847-8537-29cbf8a31fe7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.419614] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1137.419614] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f99b08-0ca0-e2c8-dfef-5389d597547b" [ 1137.419614] env[69475]: _type = "Task" [ 1137.419614] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.429853] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f99b08-0ca0-e2c8-dfef-5389d597547b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.515866] env[69475]: DEBUG nova.network.neutron [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.647754] env[69475]: DEBUG nova.scheduler.client.report [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 
'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1137.659832] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508974, 'name': Rename_Task, 'duration_secs': 0.139147} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.660148] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1137.660438] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ffd3627-de3c-4ee3-ba8b-27dabfb399b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.668505] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1137.668505] env[69475]: value = "task-3508976" [ 1137.668505] env[69475]: _type = "Task" [ 1137.668505] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.681048] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508976, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.841416] env[69475]: DEBUG nova.network.neutron [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1137.931132] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f99b08-0ca0-e2c8-dfef-5389d597547b, 'name': SearchDatastore_Task, 'duration_secs': 0.012905} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1137.931908] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa72d5aa-7c7e-49f6-ad58-d58f8b904d90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.938075] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1137.938075] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52042012-c1a3-7715-6fca-be6ce2de4d19" [ 1137.938075] env[69475]: _type = "Task" [ 1137.938075] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1137.946472] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52042012-c1a3-7715-6fca-be6ce2de4d19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.018830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.019260] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Instance network_info: |[{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1138.019705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:59:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f953a932-b0a0-4620-ae5b-9a9cda24d9a4', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1138.028015] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1138.028353] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1138.028700] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82e4f6d0-1674-4f9f-a951-74c3df4923b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.051916] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1138.051916] env[69475]: value = "task-3508977" [ 1138.051916] env[69475]: _type = "Task" [ 1138.051916] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.061398] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508977, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.156233] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.846s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.158673] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.576s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.158889] env[69475]: DEBUG nova.objects.instance [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1138.179988] env[69475]: DEBUG oslo_vmware.api [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3508976, 'name': PowerOnVM_Task, 'duration_secs': 0.464077} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.181085] env[69475]: INFO nova.scheduler.client.report [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance cc85e976-78cf-4289-9674-d697630e7775 [ 1138.182145] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1138.182503] env[69475]: INFO nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Took 7.02 seconds to spawn the instance on the hypervisor. [ 1138.182575] env[69475]: DEBUG nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1138.186257] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba3b3309-bfac-4b7b-9f13-290a45212cca {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.215695] env[69475]: DEBUG nova.compute.manager [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1138.215893] env[69475]: DEBUG nova.compute.manager [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1138.216257] env[69475]: DEBUG oslo_concurrency.lockutils [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1138.216355] env[69475]: DEBUG oslo_concurrency.lockutils [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1138.216512] env[69475]: DEBUG nova.network.neutron [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1138.334192] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1138.344080] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.362297] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1138.362542] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1138.362699] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1138.362905] 
env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1138.363077] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1138.363232] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1138.363443] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1138.363679] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1138.363866] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1138.364041] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1138.364224] env[69475]: DEBUG nova.virt.hardware [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1138.365141] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75e399a0-3774-47e7-a549-3ade2ad87006 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.373179] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da8f4d0-1ea1-49d6-868e-6004729a883a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.448362] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52042012-c1a3-7715-6fca-be6ce2de4d19, 'name': SearchDatastore_Task, 
'duration_secs': 0.013891} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.448610] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1138.448865] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1138.449136] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-537aa282-a3e9-4b03-8f20-1e99821ddf83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.456230] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1138.456230] env[69475]: value = "task-3508978" [ 1138.456230] env[69475]: _type = "Task" [ 1138.456230] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.464524] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508978, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.562095] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508977, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.649517] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Successfully updated port: face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1138.693768] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b896869b-6389-41c2-8142-ca6b3e6200ff tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "cc85e976-78cf-4289-9674-d697630e7775" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.032s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.703533] env[69475]: INFO nova.compute.manager [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Took 16.93 seconds to build instance. [ 1138.870514] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d024c276-b01a-47f5-99cb-f030bf5ea6ec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.874422] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.874636] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1138.893140] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bea9f34-ae75-47c6-86b1-5b55a9f1265d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.901362] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1138.933230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "96533442-eb53-4bc2-bda3-71efc973d403" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.933464] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: 
waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.934040] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "96533442-eb53-4bc2-bda3-71efc973d403-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1138.934705] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1138.934917] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1138.936970] env[69475]: INFO nova.compute.manager [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Terminating instance [ 1138.967030] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508978, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.066130] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508977, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.157682] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.157881] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.157977] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1139.170115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1bd0d622-c382-42fe-af46-4117870e4cb1 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.010s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.170115] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.442s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1139.170115] env[69475]: DEBUG nova.objects.instance [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lazy-loading 'resources' on Instance uuid 55d3513b-e0ad-49a7-bd26-147b1b2632cb {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.197871] env[69475]: DEBUG nova.network.neutron [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1139.198248] env[69475]: DEBUG nova.network.neutron [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.207301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3cab6b05-df20-4f99-9ef3-0d49993486ee tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.448s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.384050] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384200] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384410] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384561] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.384930] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.385223] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.385223] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1139.385313] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.408483] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1139.408789] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58d7ceda-7b5b-41e8-9cc6-4ce0e79769d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.417303] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1139.417303] env[69475]: value = "task-3508979" [ 1139.417303] env[69475]: _type = "Task" [ 1139.417303] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.426079] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508979, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.441297] env[69475]: DEBUG nova.compute.manager [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1139.441609] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1139.442462] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bfa16d7-1f00-406b-9a75-8657dd9109d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.447251] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Volume attach. Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1139.447477] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701134', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'name': 'volume-be3145de-1a5b-4dc5-bbd7-5173190bff83', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92020fc6-aff6-437f-9e26-a5b61ea7e76f', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'serial': 'be3145de-1a5b-4dc5-bbd7-5173190bff83'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1139.448606] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98fb46f-080d-4943-963a-77d81abe634d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.454199] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1139.454199] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e74f2380-fe3f-4ee7-b659-f94c6593013c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.471673] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bc5b574-41b3-4df7-8567-9d22fdb2d015 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.474137] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1139.474137] env[69475]: value = "task-3508980" [ 1139.474137] env[69475]: _type = "Task" [ 1139.474137] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.493569] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659739} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.501221] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] volume-be3145de-1a5b-4dc5-bbd7-5173190bff83/volume-be3145de-1a5b-4dc5-bbd7-5173190bff83.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.502062] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1139.502297] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1139.502588] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c9b13df-e88e-4fff-bf6f-a079794c5550 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.518754] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3923430c-87ff-459a-9437-7e2c72f49341 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.520991] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508980, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.526256] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1139.526256] env[69475]: value = "task-3508981" [ 1139.526256] env[69475]: _type = "Task" [ 1139.526256] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.527498] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1139.527498] env[69475]: value = "task-3508982" [ 1139.527498] env[69475]: _type = "Task" [ 1139.527498] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.538707] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508981, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.543039] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508982, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.564526] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508977, 'name': CreateVM_Task, 'duration_secs': 1.346483} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.564719] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1139.565460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1139.565634] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1139.565965] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1139.566307] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8266d4a0-bd09-4ac7-b2a7-2b727ad88644 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.571480] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 
1139.571480] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247e8e6-f231-212b-5691-eac62e9f806e" [ 1139.571480] env[69475]: _type = "Task" [ 1139.571480] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1139.580718] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247e8e6-f231-212b-5691-eac62e9f806e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.701018] env[69475]: DEBUG oslo_concurrency.lockutils [req-23dba787-7a9b-438b-a40b-4952886560cd req-1029df21-073d-41f5-b205-df5503f8d862 service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1139.713354] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1139.889390] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1139.936869] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508979, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.939687] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99337262-bf59-4e36-a4e9-8193e9c1aad8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.947669] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39fc2f1b-f8e2-427a-9304-3d18a6b1d9ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.979924] env[69475]: DEBUG nova.network.neutron [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1139.984723] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b51218f-d5b1-4266-bfd7-177917662be6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.994934] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce52f35-ab95-4e17-921d-0f1578e728b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.998659] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508980, 'name': PowerOffVM_Task, 'duration_secs': 0.221862} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.999535] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1139.999707] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1140.000309] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-286ea618-202b-4d9a-b2dc-afd558610308 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.009567] env[69475]: DEBUG nova.compute.provider_tree [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1140.039602] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508982, 'name': ReconfigVM_Task, 'duration_secs': 0.430562} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.042567] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfigured VM instance instance-00000069 to attach disk [datastore2] volume-be3145de-1a5b-4dc5-bbd7-5173190bff83/volume-be3145de-1a5b-4dc5-bbd7-5173190bff83.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.048051] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508981, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092778} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.048051] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ce5cdf4-fde4-42c2-8746-984de0f64065 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.057720] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1140.058835] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc4a466-d42b-4098-9c8d-d9990677f066 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.066220] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1140.066220] env[69475]: value = "task-3508984" [ 1140.066220] env[69475]: _type = "Task" [ 1140.066220] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.086315] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1140.096559] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-594506f3-e1ad-43b1-889b-a8b9ada04137 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.112016] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1140.112689] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1140.112689] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore2] 96533442-eb53-4bc2-bda3-71efc973d403 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1140.113213] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf0776ff-9d4a-4118-9a09-a6bc6aa58133 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.121042] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508984, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.126552] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247e8e6-f231-212b-5691-eac62e9f806e, 'name': SearchDatastore_Task, 'duration_secs': 0.012267} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.126903] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1140.126903] env[69475]: value = "task-3508986" [ 1140.126903] env[69475]: _type = "Task" [ 1140.126903] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.127248] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1140.127248] env[69475]: value = "task-3508985" [ 1140.127248] env[69475]: _type = "Task" [ 1140.127248] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.127560] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.127830] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1140.128139] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.128353] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.128580] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1140.128956] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-13e23c80-2907-44ae-9fdb-55953e2b1c00 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.141965] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508985, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.145336] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508986, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.151143] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1140.151370] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1140.152076] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adb1707f-aaac-422e-adba-e769ad160e19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.157325] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1140.157325] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526a91cd-e773-c4f2-e3f1-0d4b9ec26244" [ 1140.157325] env[69475]: _type = "Task" [ 1140.157325] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.165457] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526a91cd-e773-c4f2-e3f1-0d4b9ec26244, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.247343] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Received event network-vif-plugged-face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.247572] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.247783] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.247960] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.248122] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] No waiting events found dispatching network-vif-plugged-face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1140.248290] env[69475]: WARNING nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Received unexpected event network-vif-plugged-face26ac-c45b-4932-b32e-bd2d172da60d for instance with vm_state building and task_state spawning. [ 1140.248450] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Received event network-changed-face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1140.248600] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Refreshing instance network info cache due to event network-changed-face26ac-c45b-4932-b32e-bd2d172da60d. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1140.248761] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Acquiring lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1140.427732] env[69475]: DEBUG oslo_vmware.api [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3508979, 'name': PowerOnVM_Task, 'duration_secs': 0.571009} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.428088] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1140.428210] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9ddeaebd-2311-4b7e-b210-82b0e8211130 tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance '1459221f-4c35-4a49-a8c0-f8b4ee3e2265' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1140.439180] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1140.439412] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1140.488951] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.489334] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Instance network_info: |[{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1140.490235] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Acquired lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1140.490587] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Refreshing network info cache for port face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1140.492996] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:66:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55520f67-d092-4eb7-940f-d7cceaa1ca1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'face26ac-c45b-4932-b32e-bd2d172da60d', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1140.500532] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Creating folder: Project (9101c50cbfe74c99b1e1a528cb5b5994). Parent ref: group-v700823. 
{{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1140.503950] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-29f1ff20-8f9f-4cdf-bf45-2acdff1c5ebb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.513667] env[69475]: DEBUG nova.scheduler.client.report [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1140.519192] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Created folder: Project (9101c50cbfe74c99b1e1a528cb5b5994) in parent group-v700823. [ 1140.519411] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Creating folder: Instances. Parent ref: group-v701137. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1140.519895] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e21c0f6b-8dfd-427a-897a-eb1b3abd5736 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.530699] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Created folder: Instances in parent group-v701137. [ 1140.530699] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.530923] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1140.531291] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c2550ecd-ae3c-4f95-9977-a2036b48061c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.553361] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1140.553361] env[69475]: value = "task-3508989" [ 1140.553361] env[69475]: _type = "Task" [ 1140.553361] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.562601] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508989, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.586803] env[69475]: DEBUG oslo_vmware.api [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3508984, 'name': ReconfigVM_Task, 'duration_secs': 0.156271} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.589535] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701134', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'name': 'volume-be3145de-1a5b-4dc5-bbd7-5173190bff83', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92020fc6-aff6-437f-9e26-a5b61ea7e76f', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'serial': 'be3145de-1a5b-4dc5-bbd7-5173190bff83'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1140.647190] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508986, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.650410] env[69475]: DEBUG oslo_vmware.api [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3508985, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.410598} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.650643] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1140.650829] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1140.651024] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1140.651190] env[69475]: INFO nova.compute.manager [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Took 1.21 seconds to destroy the instance on the hypervisor. 
[ 1140.651443] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1140.651628] env[69475]: DEBUG nova.compute.manager [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1140.651724] env[69475]: DEBUG nova.network.neutron [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1140.668999] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526a91cd-e773-c4f2-e3f1-0d4b9ec26244, 'name': SearchDatastore_Task, 'duration_secs': 0.062438} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.670374] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6756ea00-f6b9-44e9-938c-f9f30ccf1f91 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.679836] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1140.679836] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e456b-68b1-4130-98d7-6ab3b014b182" [ 1140.679836] env[69475]: _type = "Task" [ 1140.679836] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.688917] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e456b-68b1-4130-98d7-6ab3b014b182, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.746514] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updated VIF entry in instance network info cache for port face26ac-c45b-4932-b32e-bd2d172da60d. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1140.746988] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1140.944215] env[69475]: DEBUG nova.compute.utils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1141.021601] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.024054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.135s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.024260] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.025443] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1141.025443] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa8ec57-6fb6-4d68-8a2f-a3d0bd5570a8 {{(pid=69475) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.034259] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8fd0d25-4675-44f1-b7ac-37139ae51bcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.050216] env[69475]: INFO nova.scheduler.client.report [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Deleted allocations for instance 55d3513b-e0ad-49a7-bd26-147b1b2632cb [ 1141.051742] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078f00d7-e963-4c0a-a5da-306da7a3f4dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.068231] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3508989, 'name': CreateVM_Task, 'duration_secs': 0.4041} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.070081] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1141.070819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.070988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.071321] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1141.072575] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6040bf92-1ed4-4ff4-b405-afc154236ef8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.076072] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-810dee4d-4058-43dd-9310-d25119526dcc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.081306] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1141.081306] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52462d8a-05dd-a54c-8469-7950ce051924" [ 1141.081306] env[69475]: _type = "Task" [ 1141.081306] env[69475]: } 
to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.110487] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178998MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1141.110672] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.110843] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.124104] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52462d8a-05dd-a54c-8469-7950ce051924, 'name': SearchDatastore_Task, 'duration_secs': 0.02491} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.124416] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.125067] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1141.125067] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.138485] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508986, 'name': ReconfigVM_Task, 'duration_secs': 0.817816} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.138696] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1141.139339] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-01242060-ba9d-4e7a-8a80-857f9846ffa1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.147021] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1141.147021] env[69475]: value = "task-3508990" [ 1141.147021] env[69475]: _type = "Task" [ 1141.147021] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.157617] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508990, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.160531] env[69475]: DEBUG nova.compute.manager [req-991f0509-4589-4c88-96ca-7a1c8292a858 req-971ef0a7-8c85-4343-9591-ff26449f0bfe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Received event network-vif-deleted-cfc6e6cb-798d-4b99-8764-5faf560ca662 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.160674] env[69475]: INFO nova.compute.manager [req-991f0509-4589-4c88-96ca-7a1c8292a858 req-971ef0a7-8c85-4343-9591-ff26449f0bfe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Neutron deleted interface cfc6e6cb-798d-4b99-8764-5faf560ca662; detaching it from the instance and deleting it from the info cache [ 1141.160828] env[69475]: DEBUG nova.network.neutron [req-991f0509-4589-4c88-96ca-7a1c8292a858 req-971ef0a7-8c85-4343-9591-ff26449f0bfe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.191173] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524e456b-68b1-4130-98d7-6ab3b014b182, 'name': SearchDatastore_Task, 'duration_secs': 0.013084} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.191398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.191653] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 579b4d3e-bd76-4f5d-b972-7b289bca04a0/579b4d3e-bd76-4f5d-b972-7b289bca04a0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1141.191929] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.192128] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1141.192373] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6cdc46d6-0022-4aa5-9519-5d64e86d0970 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.194441] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5778a572-af07-4886-a35c-1fe16176ac8b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.201300] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1141.201300] env[69475]: value = "task-3508991" [ 1141.201300] env[69475]: _type = "Task" [ 1141.201300] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.205675] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1141.205880] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1141.206937] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50b39744-4463-447b-a013-3da3cca36b86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.212301] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508991, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.215443] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1141.215443] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52368fbe-1754-cba4-0d79-79ce1c7714c3" [ 1141.215443] env[69475]: _type = "Task" [ 1141.215443] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.223363] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52368fbe-1754-cba4-0d79-79ce1c7714c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.249851] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Releasing lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1141.249851] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Received event network-changed-f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.250046] env[69475]: DEBUG nova.compute.manager [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Refreshing instance network info cache due to event network-changed-f181f990-1cef-4b68-ae07-ea93c380f5a0. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1141.250345] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Acquiring lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.250531] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Acquired lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.250729] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Refreshing network info cache for port f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1141.446457] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.471627] env[69475]: DEBUG nova.network.neutron [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1141.566375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a43f9d85-d1ac-470c-9c9c-c2a2b95247c4 tempest-ServerShowV257Test-1712138627 tempest-ServerShowV257Test-1712138627-project-member] Lock "55d3513b-e0ad-49a7-bd26-147b1b2632cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.044s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.657662] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508990, 'name': Rename_Task, 'duration_secs': 0.28477} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.658182] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1141.659277] env[69475]: DEBUG nova.objects.instance [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'flavor' on Instance uuid 92020fc6-aff6-437f-9e26-a5b61ea7e76f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1141.660776] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98da73f9-111e-4383-acf4-d4e04a433a3b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.664301] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7cdcad71-1dd2-4cd0-81db-cde06c723969 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.674793] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8b12af-96ba-4bea-97a4-e1aee356e439 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.687523] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1141.687523] env[69475]: value = "task-3508992" [ 1141.687523] env[69475]: _type = "Task" [ 1141.687523] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.700509] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508992, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.714889] env[69475]: DEBUG nova.compute.manager [req-991f0509-4589-4c88-96ca-7a1c8292a858 req-971ef0a7-8c85-4343-9591-ff26449f0bfe service nova] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Detach interface failed, port_id=cfc6e6cb-798d-4b99-8764-5faf560ca662, reason: Instance 96533442-eb53-4bc2-bda3-71efc973d403 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1141.732721] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508991, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.737610] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52368fbe-1754-cba4-0d79-79ce1c7714c3, 'name': SearchDatastore_Task, 'duration_secs': 0.0435} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.738516] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09647061-fc9f-468d-bfe7-ea7fa0be019a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1141.744686] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1141.744686] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52541ac6-e05e-9821-80ce-3f300454b70b" [ 1141.744686] env[69475]: _type = "Task" [ 1141.744686] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1141.755983] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52541ac6-e05e-9821-80ce-3f300454b70b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.974189] env[69475]: INFO nova.compute.manager [-] [instance: 96533442-eb53-4bc2-bda3-71efc973d403] Took 1.32 seconds to deallocate network for instance. [ 1142.021457] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updated VIF entry in instance network info cache for port f181f990-1cef-4b68-ae07-ea93c380f5a0. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1142.021676] env[69475]: DEBUG nova.network.neutron [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating instance_info_cache with network_info: [{"id": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "address": "fa:16:3e:ce:4f:49", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.130", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf181f990-1c", "ovs_interfaceid": "f181f990-1cef-4b68-ae07-ea93c380f5a0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.124912] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Applying migration context for instance 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 as it has an incoming, in-progress migration d83e0b92-e5cf-482f-9e30-acc92b1aae0c. Migration status is finished {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1142.126713] env[69475]: INFO nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating resource usage from migration d83e0b92-e5cf-482f-9e30-acc92b1aae0c [ 1142.150039] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.150220] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4066a18f-acc5-49b5-941c-0711f29bdcd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.150329] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4100fb43-1dae-40b1-8caa-11dd67962274 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.150519] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance f8a82046-4589-45d2-a7a3-466fe4d8f9c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.150703] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 4f091501-351c-45b8-9f64-4d28d4623df8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.150888] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance e10a197a-a9b7-43ce-b8a8-ce186619feb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.151112] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 92020fc6-aff6-437f-9e26-a5b61ea7e76f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.151309] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 460d4b93-b18a-4965-9e2b-8c6175ccc91f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.151505] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 96533442-eb53-4bc2-bda3-71efc973d403 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.151646] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Migration d83e0b92-e5cf-482f-9e30-acc92b1aae0c is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1142.151765] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.151955] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.152188] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 579b4d3e-bd76-4f5d-b972-7b289bca04a0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.152398] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.152580] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d63ddc35-06b3-43a2-bdd5-a91cf4047a4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1142.152828] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1142.153012] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3456MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1142.167061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-06022206-3418-4bda-ab52-ce4b401e1d30 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.339s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.202461] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508992, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.224886] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508991, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.769057} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.227882] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 579b4d3e-bd76-4f5d-b972-7b289bca04a0/579b4d3e-bd76-4f5d-b972-7b289bca04a0.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1142.228147] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1142.228794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.229013] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.229214] env[69475]: DEBUG nova.compute.manager [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Going to confirm migration 7 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1142.230708] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3d144a4c-fe53-4dd1-aef0-d9e65cae01d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.238470] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1142.238470] env[69475]: value = "task-3508993" [ 1142.238470] env[69475]: _type = "Task" [ 1142.238470] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.257575] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508993, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.268950] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52541ac6-e05e-9821-80ce-3f300454b70b, 'name': SearchDatastore_Task, 'duration_secs': 0.058538} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.269363] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.269706] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d63ddc35-06b3-43a2-bdd5-a91cf4047a4b/d63ddc35-06b3-43a2-bdd5-a91cf4047a4b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1142.270059] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f473ec99-667f-4daf-84be-a923416c4371 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.281889] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1142.281889] env[69475]: value = "task-3508994" [ 1142.281889] env[69475]: _type = "Task" [ 1142.281889] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.295893] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3508994, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.369058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f533e63-f75b-4d9d-bc39-bcd05f740ed5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.376884] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1727d143-b7a4-484b-a311-22e8eec52f87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.408894] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7714117-b6e9-4115-87b5-8f68875a34ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.416688] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f1e58c8-0fe4-4b63-b77b-b870c103cdc1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.430444] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1142.482151] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.514433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1142.514433] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1142.514433] env[69475]: INFO nova.compute.manager [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Attaching volume a6c984d3-67d7-42ec-8b22-82a4405a0b69 to /dev/sdb [ 1142.523580] env[69475]: DEBUG oslo_concurrency.lockutils [req-360cc741-bde7-409c-9c3e-c92b43558072 req-86193380-e640-4552-8839-91983edecde7 service nova] Releasing lock "refresh_cache-6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.546060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-7c3f9a60-5d38-47f3-a112-4c183f6b1102 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.552996] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06429d1-67db-4865-9e53-8a8178de3e29 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.567444] env[69475]: DEBUG nova.virt.block_device [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating existing volume attachment record: 793fb098-f734-42cb-84eb-3369f519c67f {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1142.612406] env[69475]: INFO nova.compute.manager [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Rescuing [ 1142.612701] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.612852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.613048] env[69475]: DEBUG nova.network.neutron [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.705177] env[69475]: DEBUG oslo_vmware.api [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3508992, 'name': PowerOnVM_Task, 'duration_secs': 0.732802} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.706406] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1142.706754] env[69475]: INFO nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Took 9.20 seconds to spawn the instance on the hypervisor. 
[ 1142.706986] env[69475]: DEBUG nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1142.707996] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529f8b40-9dde-444e-9915-d3b984e55484 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.758695] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508993, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089793} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.758695] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1142.758695] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa39929c-57a5-49a8-9963-74c14df3170a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.790271] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 579b4d3e-bd76-4f5d-b972-7b289bca04a0/579b4d3e-bd76-4f5d-b972-7b289bca04a0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1142.795020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.795020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquired lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1142.795020] env[69475]: DEBUG nova.network.neutron [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.795020] env[69475]: DEBUG nova.objects.instance [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'info_cache' 
on Instance uuid 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.795656] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8d42c3f8-3ab9-413a-8406-0396d94aacb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.828050] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3508994, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.829984] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1142.829984] env[69475]: value = "task-3508996" [ 1142.829984] env[69475]: _type = "Task" [ 1142.829984] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.840507] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508996, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.933751] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1143.228934] env[69475]: INFO nova.compute.manager [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Took 20.66 seconds to build instance. [ 1143.305799] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3508994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671263} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.306123] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] d63ddc35-06b3-43a2-bdd5-a91cf4047a4b/d63ddc35-06b3-43a2-bdd5-a91cf4047a4b.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1143.306379] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1143.306667] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35001ea5-ea73-4494-afaf-e039fc237580 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.313841] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1143.313841] env[69475]: value = "task-3508999" [ 1143.313841] env[69475]: _type = "Task" [ 1143.313841] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.325436] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3508999, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.341694] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3508996, 'name': ReconfigVM_Task, 'duration_secs': 0.343756} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.342129] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 579b4d3e-bd76-4f5d-b972-7b289bca04a0/579b4d3e-bd76-4f5d-b972-7b289bca04a0.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1143.342766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49ff559b-c2de-4b35-a84e-2326eceebcf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.349587] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1143.349587] env[69475]: value = "task-3509000" [ 1143.349587] env[69475]: _type = "Task" [ 1143.349587] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.358702] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509000, 'name': Rename_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.383658] env[69475]: DEBUG nova.network.neutron [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.439543] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1143.439823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.329s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.440225] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.958s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.440561] env[69475]: DEBUG nova.objects.instance [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid 96533442-eb53-4bc2-bda3-71efc973d403 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.441844] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1143.442168] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Cleaning up deleted instances {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1143.567244] env[69475]: DEBUG nova.network.neutron [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [{"id": "75df31f7-58d6-423b-80c5-e46458f30a93", "address": "fa:16:3e:84:d9:20", "network": {"id": "a3c936c7-212d-4bda-b517-fafafc6fc179", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1672979804-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f8ffeef220f04d9eb22ef69b68e9c34a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b80dd748-3d7e-4a23-a38d-9e79a3881452", "external-id": "nsx-vlan-transportzone-497", "segmentation_id": 497, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75df31f7-58", "ovs_interfaceid": "75df31f7-58d6-423b-80c5-e46458f30a93", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1143.734154] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8a822313-8040-4f59-b087-c138b283d6d9 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] 
Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.179s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1143.824061] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3508999, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085159} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.824369] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1143.825172] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034dd0a8-2a8b-4951-8fb0-8bcedaf3d8c3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.848861] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfiguring VM instance instance-00000074 to attach disk [datastore2] d63ddc35-06b3-43a2-bdd5-a91cf4047a4b/d63ddc35-06b3-43a2-bdd5-a91cf4047a4b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1143.849106] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c8ce6124-a3b1-44fa-844f-8e066f65e011 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.871229] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509000, 'name': Rename_Task, 'duration_secs': 0.154875} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.872389] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1143.872685] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1143.872685] env[69475]: value = "task-3509001" [ 1143.872685] env[69475]: _type = "Task" [ 1143.872685] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.872976] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d48af1f-be21-48dc-b0b3-46bd520e19ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.881588] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509001, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.882703] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1143.882703] env[69475]: value = "task-3509002" [ 1143.882703] env[69475]: _type = "Task" [ 1143.882703] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.885950] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.890527] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509002, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.968912] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] There are 57 instances to clean {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1143.969226] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: cc85e976-78cf-4289-9674-d697630e7775] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.070460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Releasing lock "refresh_cache-1459221f-4c35-4a49-a8c0-f8b4ee3e2265" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.070733] env[69475]: DEBUG nova.objects.instance [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lazy-loading 'migration_context' on Instance uuid 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.164118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5088a572-99c5-4097-8e1e-2ea5b8be9f9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.172083] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0ad759-612f-4397-aa81-eb5e5cf2d4cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.205300] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06adbac-6054-4660-ad19-837c3b3c2fab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.213818] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58cd1ea-8501-4db5-a528-91fc74540ef8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.229900] env[69475]: DEBUG nova.compute.provider_tree [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.244353] env[69475]: DEBUG nova.compute.manager [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Received event network-changed-5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.244545] env[69475]: DEBUG nova.compute.manager [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Refreshing instance network info cache due to event network-changed-5b51cc5d-6e38-423f-8f69-13541ea8a317. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1144.244747] env[69475]: DEBUG oslo_concurrency.lockutils [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.244886] env[69475]: DEBUG oslo_concurrency.lockutils [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.245227] env[69475]: DEBUG nova.network.neutron [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Refreshing network info cache for port 5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.293326] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1144.294408] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b8ec1d-b823-4abc-91e9-73d964f4949e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.301141] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1144.301372] env[69475]: ERROR oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk due to incomplete transfer. [ 1144.301611] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6e7f5049-c5d2-429c-9a57-3f4c15cafa62 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.323109] env[69475]: DEBUG oslo_vmware.rw_handles [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f91c94-1b75-b993-3c52-2db5fb856f57/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1144.323386] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Uploaded image 6b1310bb-4147-4b4b-9e96-dde2c9000c1d to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1144.325996] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1144.326296] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d3b898d3-bf3d-4755-9f98-8acc910d17f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.332298] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1144.332298] env[69475]: value = "task-3509003" [ 1144.332298] env[69475]: _type = "Task" [ 1144.332298] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.340478] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509003, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.386749] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509001, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.394924] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509002, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.475587] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 55d3513b-e0ad-49a7-bd26-147b1b2632cb] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.573429] env[69475]: DEBUG nova.objects.base [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Object Instance<1459221f-4c35-4a49-a8c0-f8b4ee3e2265> lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1144.574460] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18c6a0c1-dcbb-44e2-847a-16289d8f1b59 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.594276] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90cca531-d28b-435c-b1ba-6ace4c12174e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.600101] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1144.600101] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3e84-5057-5ab9-995a-47af90c239cf" [ 1144.600101] env[69475]: _type = "Task" [ 1144.600101] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.608107] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3e84-5057-5ab9-995a-47af90c239cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.733601] env[69475]: DEBUG nova.scheduler.client.report [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1144.842236] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509003, 'name': Destroy_Task} progress is 33%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.885850] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509001, 'name': ReconfigVM_Task, 'duration_secs': 0.675868} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.885850] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfigured VM instance instance-00000074 to attach disk [datastore2] d63ddc35-06b3-43a2-bdd5-a91cf4047a4b/d63ddc35-06b3-43a2-bdd5-a91cf4047a4b.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1144.885850] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a2b783a-5b8d-42c8-af41-a2c22ed34827 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.896038] env[69475]: DEBUG oslo_vmware.api [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509002, 'name': PowerOnVM_Task, 'duration_secs': 0.538324} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.896698] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1144.896914] env[69475]: INFO nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Took 8.99 seconds to spawn the instance on the hypervisor. [ 1144.897115] env[69475]: DEBUG nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1144.897477] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1144.897477] env[69475]: value = "task-3509004" [ 1144.897477] env[69475]: _type = "Task" [ 1144.897477] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.898277] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29786d0-191a-4cb2-bd6a-b9c4121a7c0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.914802] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509004, 'name': Rename_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.970534] env[69475]: DEBUG nova.network.neutron [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updated VIF entry in instance network info cache for port 5b51cc5d-6e38-423f-8f69-13541ea8a317. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1144.970911] env[69475]: DEBUG nova.network.neutron [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.979834] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 74b0a7b0-5b15-4f4c-a4b1-b0bf28bc8d6c] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.109880] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]527f3e84-5057-5ab9-995a-47af90c239cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009285} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.110180] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1145.239817] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.800s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.242251] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.132s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1145.343055] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509003, 'name': Destroy_Task, 'duration_secs': 0.719575} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.343279] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Destroyed the VM [ 1145.343525] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1145.343777] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-41f2350e-5219-4894-aa48-dda478c43483 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.350114] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1145.350114] env[69475]: value = "task-3509006" [ 1145.350114] env[69475]: _type = "Task" [ 1145.350114] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.359509] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509006, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.372714] env[69475]: INFO nova.scheduler.client.report [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance 96533442-eb53-4bc2-bda3-71efc973d403 [ 1145.410299] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509004, 'name': Rename_Task, 'duration_secs': 0.178341} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.410586] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1145.410830] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae113431-3e6c-4233-84d2-518162d9a8a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.420279] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1145.420279] env[69475]: value = "task-3509007" [ 1145.420279] env[69475]: _type = "Task" [ 1145.420279] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.425678] env[69475]: INFO nova.compute.manager [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Took 22.85 seconds to build instance. [ 1145.426784] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1145.427754] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48f861c2-d8a3-415a-a63a-f0a995067acc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.433956] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509007, 'name': PowerOnVM_Task} progress is 33%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.439231] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1145.439231] env[69475]: value = "task-3509008" [ 1145.439231] env[69475]: _type = "Task" [ 1145.439231] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1145.448154] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.473848] env[69475]: DEBUG oslo_concurrency.lockutils [req-310b2e6b-f76e-46a6-9dd8-e53f624a6cd8 req-d8534c7f-fe9d-4f96-a769-b4d7c5c3940a service nova] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1145.483070] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 60516e16-bd7e-4fc1-b95f-603fb5ef6ae9] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.861885] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509006, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.883682] env[69475]: DEBUG oslo_concurrency.lockutils [None req-235e489c-5f48-4252-bbc2-a022b14d0e42 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "96533442-eb53-4bc2-bda3-71efc973d403" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.950s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.927550] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e547201-262a-4626-8170-6dbeb9e69b7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.930681] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2588f857-9501-448a-99a4-9b413cb91d96 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.382s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1145.935484] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509007, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1145.938479] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e88f66-eff0-4022-8500-2e23825e0bea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.950407] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509008, 'name': PowerOffVM_Task, 'duration_secs': 0.255425} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1145.976104] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1145.977617] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6a7fa0-65c3-428d-abc8-1ae90e9bbff8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.980661] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf39bda4-451b-4ffa-a445-8d89f5cd9794 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.987697] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 226afd68-34d8-482e-89f9-0c45a300a803] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.007605] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b68dd10-e299-4c0f-8a66-3ece53af4ad4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.012809] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 97013703-3506-4441-b80c-cbb5c7e29bdf] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.015060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1bb9fc2-6462-4180-a8f8-326fc3fdab66 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.034727] env[69475]: DEBUG nova.compute.provider_tree [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1146.060886] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering off the VM {{(pid=69475) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1146.061473] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-80834ed5-821c-4cfd-8825-81a8bf0d55f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.067899] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1146.067899] env[69475]: value = "task-3509009" [ 1146.067899] env[69475]: _type = "Task" [ 1146.067899] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.077061] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1146.077876] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1146.077876] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.077876] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.078233] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.078552] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ae3a35e-9772-4703-88c0-a9440a9676b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.087243] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.087440] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None 
req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1146.088219] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7ecb8a0-35f7-479d-a2a0-855f904108f6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.093824] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1146.093824] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52012bfb-e97d-35c4-7358-1f97c63b5335" [ 1146.093824] env[69475]: _type = "Task" [ 1146.093824] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.102461] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52012bfb-e97d-35c4-7358-1f97c63b5335, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.360404] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509006, 'name': RemoveSnapshot_Task, 'duration_secs': 0.616972} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.360705] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1146.360939] env[69475]: DEBUG nova.compute.manager [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.361696] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b642518-b3b0-4d3f-9709-4f89d055ac18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.429916] env[69475]: DEBUG oslo_vmware.api [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509007, 'name': PowerOnVM_Task, 'duration_secs': 0.815111} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.430212] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1146.430405] env[69475]: INFO nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Took 8.10 seconds to spawn the instance on the hypervisor. [ 1146.430580] env[69475]: DEBUG nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1146.431329] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1caeaa2-43d9-45b7-9116-8bc62b71e719 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.519912] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 951c225b-d930-449f-81b5-4f28f9dd27e5] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.538172] env[69475]: DEBUG nova.scheduler.client.report [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1146.604305] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52012bfb-e97d-35c4-7358-1f97c63b5335, 'name': SearchDatastore_Task, 'duration_secs': 0.014321} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1146.605129] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a0bc127-f59b-4004-86fe-850d8765cfd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.610168] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1146.610168] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526f6972-4431-b3d7-80fe-53976c55ca47" [ 1146.610168] env[69475]: _type = "Task" [ 1146.610168] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.618773] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526f6972-4431-b3d7-80fe-53976c55ca47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.794491] env[69475]: DEBUG nova.compute.manager [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.794682] env[69475]: DEBUG nova.compute.manager [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1146.794903] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.795294] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.795549] env[69475]: DEBUG nova.network.neutron [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1146.873586] env[69475]: INFO nova.compute.manager [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Shelve offloading [ 1146.947844] env[69475]: INFO nova.compute.manager [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Took 22.76 seconds to build instance. [ 1147.023077] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: cd0e8c6a-700a-47f8-9a4c-054b84a59a7f] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.121509] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526f6972-4431-b3d7-80fe-53976c55ca47, 'name': SearchDatastore_Task, 'duration_secs': 0.028044} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1147.121782] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1147.122059] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. 
{{(pid=69475) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1147.122333] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ae065e96-c26f-4b84-98e7-2693b68d3c92 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.129228] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1147.129228] env[69475]: value = "task-3509010" [ 1147.129228] env[69475]: _type = "Task" [ 1147.129228] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.137512] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509010, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.377926] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1147.381207] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7d7e794-406f-4616-afac-741a72253dec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.388483] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1147.388483] env[69475]: value = "task-3509011" [ 1147.388483] env[69475]: _type = "Task" [ 1147.388483] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.400502] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1147.400502] env[69475]: DEBUG nova.compute.manager [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1147.401487] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e29a07-aa71-4733-9c7f-e64843ff5097 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.409152] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1147.409314] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1147.409501] env[69475]: DEBUG nova.network.neutron [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.435633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.436066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.450071] env[69475]: DEBUG oslo_concurrency.lockutils [None req-84cb80b0-efe9-4228-af27-d1ea5bdfad55 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.269s {{(pid=69475) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.527765] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: ecf115fc-4ca1-41e2-ac42-82ec8154356e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.549563] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.307s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.576935] env[69475]: DEBUG nova.network.neutron [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1147.577365] env[69475]: DEBUG nova.network.neutron [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1147.620265] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1147.620529] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701141', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'name': 'volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '460d4b93-b18a-4965-9e2b-8c6175ccc91f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'serial': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1147.621484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeab0d69-d329-49c5-a585-2c36228efdde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.645711] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d894bd7d-7c5c-4694-8daf-eadb11fcec42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.654665] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509010, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.680827] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69/volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1147.681184] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5908ba6e-4e05-425a-a5a2-829af7ac6f72 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.700509] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1147.700509] env[69475]: value = "task-3509012" [ 1147.700509] env[69475]: _type = "Task" [ 1147.700509] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1147.709174] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509012, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1147.939974] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1147.948969] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.949237] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.031860] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 20b37e69-5870-4f63-aeba-9293615da478] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.081941] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b807ba2-9067-4040-b5df-6bd52b7aa4cc req-5f0da260-4a52-4303-adae-1dd41243b369 service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.112139] env[69475]: INFO nova.scheduler.client.report [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocation for migration d83e0b92-e5cf-482f-9e30-acc92b1aae0c [ 1148.138500] env[69475]: DEBUG nova.network.neutron [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", 
"segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.149160] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509010, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.574896} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1148.149709] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk. [ 1148.150795] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daad75cb-77c6-418c-8b90-523e850b84d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.182428] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1148.183159] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5f185350-7b3a-463e-95f8-39548d7edb0a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1148.201954] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1148.201954] env[69475]: value = "task-3509013" [ 1148.201954] env[69475]: _type = "Task" [ 1148.201954] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1148.212821] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.216208] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509013, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.453465] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1148.461802] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.462066] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.463500] env[69475]: INFO nova.compute.claims [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1148.535793] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 24ef554b-30bf-4e28-856e-98eb7ec2618b] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.619819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.391s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.644875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.716930] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509012, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.720157] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509013, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.821221] env[69475]: DEBUG nova.compute.manager [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Received event network-changed-face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.821448] env[69475]: DEBUG nova.compute.manager [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Refreshing instance network info cache due to event network-changed-face26ac-c45b-4932-b32e-bd2d172da60d. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1148.821662] env[69475]: DEBUG oslo_concurrency.lockutils [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] Acquiring lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.821998] env[69475]: DEBUG oslo_concurrency.lockutils [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] Acquired lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.822339] env[69475]: DEBUG nova.network.neutron [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Refreshing network info cache for port face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1148.938486] env[69475]: DEBUG nova.compute.manager [req-5d31d34c-3a42-4781-b625-58e8c6963dff req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-vif-unplugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1148.938861] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d31d34c-3a42-4781-b625-58e8c6963dff req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1148.939314] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d31d34c-3a42-4781-b625-58e8c6963dff req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1148.939627] env[69475]: DEBUG oslo_concurrency.lockutils [req-5d31d34c-3a42-4781-b625-58e8c6963dff req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.940771] env[69475]: DEBUG nova.compute.manager [req-5d31d34c-3a42-4781-b625-58e8c6963dff 
req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] No waiting events found dispatching network-vif-unplugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1148.940771] env[69475]: WARNING nova.compute.manager [req-5d31d34c-3a42-4781-b625-58e8c6963dff req-f9fc7fbd-cf11-4da9-a7eb-c3c661b78879 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received unexpected event network-vif-unplugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e for instance with vm_state shelved and task_state shelving_offloading. [ 1148.979851] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.015961] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1149.016919] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cf7e33a-763c-4eb2-ae52-f02f43409cff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.024133] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1149.024380] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d0f5e6c-0843-4864-848c-69ae6c6b243e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.040479] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 1a0fe9ff-d20a-4aed-84a9-3b05df7dfb35] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.086395] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1149.086600] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1149.086775] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore1] e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.087034] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-159d7ac0-4b4a-452c-aae2-73b982f218e4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.093139] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1149.093139] env[69475]: value = "task-3509015" [ 1149.093139] env[69475]: _type = "Task" [ 1149.093139] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.101021] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509015, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.214524] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509012, 'name': ReconfigVM_Task, 'duration_secs': 1.14656} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.215357] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69/volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.224467] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84e98a14-1ca3-4407-8d51-19d85454e788 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.234804] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509013, 'name': ReconfigVM_Task, 'duration_secs': 0.794946} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.236189] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f/afa9d32c-9f39-44fb-bf3b-50d35842a59f-rescue.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1149.241029] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb8e2f5-f132-458b-b8b6-63962af7783e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.245133] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1149.245133] env[69475]: value = "task-3509016" [ 1149.245133] env[69475]: _type = "Task" [ 1149.245133] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.272851] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb985609-bcfe-487a-88e5-d7c670d0fbd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.286791] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509016, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.291943] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1149.291943] env[69475]: value = "task-3509017" [ 1149.291943] env[69475]: _type = "Task" [ 1149.291943] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.300619] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509017, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.538861] env[69475]: DEBUG nova.network.neutron [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updated VIF entry in instance network info cache for port face26ac-c45b-4932-b32e-bd2d172da60d. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1149.539242] env[69475]: DEBUG nova.network.neutron [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.546379] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 44bcaa36-ecd9-448b-b589-7c32066ede1d] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1149.605604] env[69475]: DEBUG oslo_vmware.api [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509015, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319203} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.605852] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1149.606075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1149.606284] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1149.626503] env[69475]: INFO nova.scheduler.client.report [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted allocations for instance e10a197a-a9b7-43ce-b8a8-ce186619feb9 [ 1149.655134] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fabb125d-ae29-44dd-93d4-efd11604e547 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.663319] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357978a7-393c-4fbe-be52-636d4b8280e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.696206] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9bb052-0398-42c0-a0c5-e84b19508dbb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.703668] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa677683-303d-4686-b365-b7574ca9e60a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.718337] env[69475]: DEBUG nova.compute.provider_tree [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1149.744335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.744561] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 
tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.744759] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.744935] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.745205] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.750093] env[69475]: INFO nova.compute.manager [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Terminating instance [ 1149.756500] env[69475]: DEBUG oslo_vmware.api [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509016, 'name': ReconfigVM_Task, 'duration_secs': 0.153487} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.756779] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701141', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'name': 'volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '460d4b93-b18a-4965-9e2b-8c6175ccc91f', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'serial': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1149.801645] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509017, 'name': ReconfigVM_Task, 'duration_secs': 0.343313} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.801792] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1149.802416] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7762e48c-c7c3-4ac9-a787-293e340deaef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.808780] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1149.808780] env[69475]: value = "task-3509018" [ 1149.808780] env[69475]: _type = "Task" [ 1149.808780] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.816291] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509018, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.042093] env[69475]: DEBUG oslo_concurrency.lockutils [req-473886f1-cfeb-44a1-aafc-d97e10064cc7 req-a3589a40-41e6-4175-ae55-48d7d73e910b service nova] Releasing lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.049866] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8963b50c-29ca-49fd-8289-1e1b7583ca25] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.130868] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1150.221306] env[69475]: DEBUG nova.scheduler.client.report [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1150.254229] env[69475]: DEBUG nova.compute.manager [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1150.254500] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1150.255573] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3bd6df1-d446-4a22-8690-8c4101f3ad05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.266902] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.267214] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42eb5302-d6f2-4f9e-926a-11b618eedc61 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.275324] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1150.275324] env[69475]: value = "task-3509019" [ 1150.275324] env[69475]: _type = "Task" [ 1150.275324] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.284838] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509019, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.319160] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509018, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.553948] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: f222cc16-7581-41ff-ae7c-0538c7b3c721] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1150.727810] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.266s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1150.728354] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1150.731670] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.752s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1150.734221] env[69475]: INFO nova.compute.claims [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1150.786386] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509019, 'name': PowerOffVM_Task, 'duration_secs': 0.252994} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.786949] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1150.786949] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1150.787262] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-42382e20-b1d7-4750-ba47-8bb59bfd5ea8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.794058] env[69475]: DEBUG nova.objects.instance [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'flavor' on Instance uuid 460d4b93-b18a-4965-9e2b-8c6175ccc91f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1150.819792] env[69475]: DEBUG oslo_vmware.api [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509018, 'name': PowerOnVM_Task, 'duration_secs': 0.648689} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.820447] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1150.822766] env[69475]: DEBUG nova.compute.manager [None req-ec70652e-6c2c-45c4-99a5-c128101f124c tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1150.823966] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5b5e80-0232-430f-bb61-7fe20e60ff60 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.856337] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1150.856490] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1150.856672] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleting the datastore file [datastore2] 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1150.856944] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-13c48a55-7f56-4a78-92fe-2aa11a49ee90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.864053] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for the task: (returnval){ [ 1150.864053] env[69475]: value = "task-3509021" [ 1150.864053] env[69475]: _type = "Task" [ 1150.864053] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.871390] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509021, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.968367] env[69475]: DEBUG nova.compute.manager [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1150.968470] env[69475]: DEBUG nova.compute.manager [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing instance network info cache due to event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1150.968675] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1150.968815] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1150.968972] env[69475]: DEBUG nova.network.neutron [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1151.058101] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e8657a44-d786-4fa6-b39c-28fc71415ce8] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.238484] env[69475]: DEBUG nova.compute.utils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1151.241972] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1151.242130] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1151.278749] env[69475]: DEBUG nova.policy [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1151.298649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8e0d19ff-2fdc-4e29-8d3b-0e331b829b63 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.785s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1151.380998] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.545913] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Successfully created port: edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1151.560749] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 211f895a-bba5-4f10-9296-0d461af49f98] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1151.748511] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1151.786273] env[69475]: DEBUG nova.network.neutron [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updated VIF entry in instance network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1151.786682] env[69475]: DEBUG nova.network.neutron [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tape27cfabc-cd", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1151.877209] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509021, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.885314] env[69475]: DEBUG nova.compute.manager [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1151.964312] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-053daf53-947a-41bb-b37a-f316863c9d77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.971617] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d1385e-3515-4a3f-8032-184fe8f31022 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.002241] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aed9657-c694-42e9-b76b-1eaeadce3e3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.009594] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612f4c9e-4556-4908-99f3-cc752b4d419c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.024189] env[69475]: DEBUG nova.compute.provider_tree [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: 
dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.064606] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: dab7b6e7-ee88-4cfc-9ea8-6386561ff3ac] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.098908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.289484] env[69475]: DEBUG oslo_concurrency.lockutils [req-1b1b3876-6ed9-417e-8975-f6e94ab635e7 req-da996a58-d688-4729-825e-10e7a07da82a service nova] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1152.375352] env[69475]: DEBUG oslo_vmware.api [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Task: {'id': task-3509021, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.271877} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.375669] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1152.375862] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1152.376051] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1152.376234] env[69475]: INFO nova.compute.manager [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Took 2.12 seconds to destroy the instance on the hypervisor. [ 1152.376483] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1152.376670] env[69475]: DEBUG nova.compute.manager [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1152.376763] env[69475]: DEBUG nova.network.neutron [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1152.401463] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.527430] env[69475]: DEBUG nova.scheduler.client.report [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1152.567794] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 41ddf915-343b-46e4-834e-11ab3899242f] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1152.625366] env[69475]: DEBUG nova.compute.manager [req-36be2ab6-f8ae-49d5-b323-8018a9718050 req-f5ae1587-1af0-48d6-94ef-9d521f50e718 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Received event network-vif-deleted-75df31f7-58d6-423b-80c5-e46458f30a93 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1152.626285] env[69475]: INFO nova.compute.manager [req-36be2ab6-f8ae-49d5-b323-8018a9718050 req-f5ae1587-1af0-48d6-94ef-9d521f50e718 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Neutron deleted interface 75df31f7-58d6-423b-80c5-e46458f30a93; detaching it from the instance and deleting it from the info cache [ 1152.626285] env[69475]: DEBUG nova.network.neutron [req-36be2ab6-f8ae-49d5-b323-8018a9718050 req-f5ae1587-1af0-48d6-94ef-9d521f50e718 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.712254] env[69475]: INFO nova.compute.manager [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Unrescuing [ 1152.712524] env[69475]: DEBUG oslo_concurrency.lockutils [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock 
"refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1152.712676] env[69475]: DEBUG oslo_concurrency.lockutils [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1152.712841] env[69475]: DEBUG nova.network.neutron [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1152.761925] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1152.789548] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1152.789797] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1152.789955] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1152.790158] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1152.790309] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1152.790456] env[69475]: DEBUG 
nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1152.790664] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1152.790825] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1152.790995] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1152.791173] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1152.791350] env[69475]: DEBUG nova.virt.hardware [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1152.792221] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1874d10a-0c4c-4e3e-b703-718fb2d0e431 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.800553] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ac97783-981e-4009-af92-be329bc86bf0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.003825] env[69475]: DEBUG nova.compute.manager [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Received event network-vif-plugged-edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1153.004413] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] Acquiring lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.004795] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.005119] env[69475]: DEBUG oslo_concurrency.lockutils [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.005376] env[69475]: DEBUG nova.compute.manager [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] No waiting events found dispatching network-vif-plugged-edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1153.005626] env[69475]: WARNING nova.compute.manager [req-c2660d83-febb-4c60-a84f-0f5414a937cd req-e3755381-1f8f-493b-bb82-2d6a68471d1a service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Received unexpected event network-vif-plugged-edf63788-66d9-4b3f-9e14-312bd4b1312f for instance with vm_state building and task_state spawning. [ 1153.033515] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.302s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.034292] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1153.042694] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.909s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.042694] env[69475]: DEBUG nova.objects.instance [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'resources' on Instance uuid e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.046544] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Successfully updated port: edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1153.070822] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: ff09407e-93ea-4919-ba5f-b7ee6dd018a4] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.109341] env[69475]: DEBUG nova.network.neutron [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.128399] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-27bcbbd0-c958-4772-a8e4-6de02c3cb3f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.137796] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46de7f3f-1abf-4a17-b0f0-7969720659f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.169715] env[69475]: DEBUG nova.compute.manager [req-36be2ab6-f8ae-49d5-b323-8018a9718050 req-f5ae1587-1af0-48d6-94ef-9d521f50e718 service nova] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Detach interface failed, port_id=75df31f7-58d6-423b-80c5-e46458f30a93, reason: Instance 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1153.423224] env[69475]: DEBUG nova.network.neutron [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1153.541559] env[69475]: DEBUG nova.compute.utils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1153.542882] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1153.543072] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1153.545364] env[69475]: DEBUG nova.objects.instance [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'numa_topology' on Instance uuid e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.550475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1153.550475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1153.550475] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1153.574605] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8f18d683-7734-4798-8963-7336fe229f16] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1153.581419] env[69475]: DEBUG nova.policy [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1153.612012] env[69475]: INFO nova.compute.manager [-] [instance: 1459221f-4c35-4a49-a8c0-f8b4ee3e2265] Took 1.24 seconds to deallocate network for instance. 
[ 1153.915525] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Successfully created port: 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1153.925472] env[69475]: DEBUG oslo_concurrency.lockutils [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1153.926131] env[69475]: DEBUG nova.objects.instance [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'flavor' on Instance uuid 92020fc6-aff6-437f-9e26-a5b61ea7e76f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1154.047394] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1154.049956] env[69475]: DEBUG nova.objects.base [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1154.082251] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: c9b2f701-a73a-4561-b637-62e3ce98a44f] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.099042] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1154.118915] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.270748] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f4282a-2490-4038-92b7-2cc3bb4a6a9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.278039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0351a760-86d2-4c19-86cd-71eae4925933 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.307674] env[69475]: DEBUG nova.network.neutron [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Updating instance_info_cache with network_info: [{"id": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "address": "fa:16:3e:d7:c7:a7", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf63788-66", "ovs_interfaceid": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1154.309266] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceef9cb4-99a5-4eec-aa82-b9bd473b9149 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.316362] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8f7eac-18d9-4d3e-9380-e30919a63565 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.329311] env[69475]: DEBUG nova.compute.provider_tree [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1154.432142] 
env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6c33d8-de0a-4e94-8111-57ca86ddc162 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.455357] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1154.455674] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d39ffb81-7ac2-403a-9b07-c2f9ca2bfdd6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.462801] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1154.462801] env[69475]: value = "task-3509022" [ 1154.462801] env[69475]: _type = "Task" [ 1154.462801] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.472781] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509022, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.586718] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: d802ccdf-b5de-4e78-91a8-6eaeceb9b2b3] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1154.813039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1154.813156] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Instance network_info: |[{"id": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "address": "fa:16:3e:d7:c7:a7", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf63788-66", "ovs_interfaceid": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1154.813805] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:c7:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'edf63788-66d9-4b3f-9e14-312bd4b1312f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1154.821195] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1154.821396] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1154.821610] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15ad23fb-2af6-4a7b-be92-b049502cbeb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.836366] env[69475]: DEBUG nova.scheduler.client.report [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1154.844234] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1154.844234] env[69475]: value = "task-3509023" [ 1154.844234] env[69475]: _type = "Task" [ 1154.844234] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.851632] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509023, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.972654] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509022, 'name': PowerOffVM_Task, 'duration_secs': 0.254951} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.972991] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1154.978626] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfiguring VM instance instance-00000069 to detach disk 2002 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1154.978945] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9125f3b7-d728-4852-b55d-1ea9ef759ba3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.997805] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1154.997805] env[69475]: value = "task-3509024" [ 1154.997805] env[69475]: _type = "Task" [ 1154.997805] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.005963] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509024, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.030239] env[69475]: DEBUG nova.compute.manager [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Received event network-changed-edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1155.030239] env[69475]: DEBUG nova.compute.manager [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Refreshing instance network info cache due to event network-changed-edf63788-66d9-4b3f-9e14-312bd4b1312f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1155.030327] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] Acquiring lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.030479] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] Acquired lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.030669] env[69475]: DEBUG nova.network.neutron [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Refreshing network info cache for port edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1155.059713] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1155.087903] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1155.088213] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1155.088383] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1155.088569] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1155.088716] env[69475]: DEBUG 
nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1155.088865] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1155.089088] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1155.089253] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1155.089416] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1155.089574] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1155.089745] env[69475]: DEBUG nova.virt.hardware [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1155.090203] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 02ba199b-a7dc-421c-a14a-b562da275377] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.092659] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d5a2f5-70dd-4753-8087-6b56faaab835 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.101580] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e394371f-1590-4ddd-ae61-58283cfa26a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.238211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8" by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.238523] env[69475]: DEBUG oslo_concurrency.lockutils [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.341391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.302s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.343807] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.942s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.356963] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509023, 'name': CreateVM_Task, 'duration_secs': 0.319555} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.357735] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1155.358725] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.358907] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.359263] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1155.359771] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22719dac-68ab-4c56-981f-3091d57fbd51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.364776] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 
tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1155.364776] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ec1264-0a71-56bf-2117-649a3d39ad9f" [ 1155.364776] env[69475]: _type = "Task" [ 1155.364776] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.374411] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ec1264-0a71-56bf-2117-649a3d39ad9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.419479] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Successfully updated port: 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1155.507463] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509024, 'name': ReconfigVM_Task, 'duration_secs': 0.301138} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.507704] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfigured VM instance instance-00000069 to detach disk 2002 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1155.507885] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1155.508152] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-afc18f66-6569-4a7a-b8ee-9d310608067d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.514197] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1155.514197] env[69475]: value = "task-3509025" [ 1155.514197] env[69475]: _type = "Task" [ 1155.514197] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.521172] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509025, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.596182] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 78b5496c-f8e2-4681-a36b-50897b0f7325] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1155.732802] env[69475]: DEBUG nova.network.neutron [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Updated VIF entry in instance network info cache for port edf63788-66d9-4b3f-9e14-312bd4b1312f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1155.733210] env[69475]: DEBUG nova.network.neutron [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Updating instance_info_cache with network_info: [{"id": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "address": "fa:16:3e:d7:c7:a7", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapedf63788-66", "ovs_interfaceid": "edf63788-66d9-4b3f-9e14-312bd4b1312f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1155.741598] env[69475]: INFO nova.compute.manager [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Detaching volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a [ 1155.778463] env[69475]: INFO nova.virt.block_device [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Attempting to driver detach volume 53fb465d-7ef8-4cfc-bb5b-08b0d766b68a from mountpoint /dev/sdb [ 1155.778754] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1155.778957] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1155.780108] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e54e8a-e995-40ca-bb40-7ebf28ad16c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.801752] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7323669-bee7-4d72-a770-71ed89013f67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.808734] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c68565-742f-49f7-94ff-012fdcb58335 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.832393] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387dd0d5-f6ac-4652-a18e-6049466f6634 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.850633] env[69475]: INFO nova.compute.claims [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1155.854449] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] The volume has not been displaced from its original location: [datastore2] volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a/volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1155.859898] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfiguring VM instance instance-00000062 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1155.861607] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-40f16c3c-baaa-49b9-8d7b-f9d05fe21c54 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.875438] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4fa95abe-6b7b-440a-b144-fd62932f0b93 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 27.704s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1155.877141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 3.778s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.877272] env[69475]: INFO nova.compute.manager [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Unshelving [ 1155.894847] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ec1264-0a71-56bf-2117-649a3d39ad9f, 'name': SearchDatastore_Task, 'duration_secs': 0.041721} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.896533] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1155.896778] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1155.897025] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.897177] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.897370] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1155.897979] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1155.897979] env[69475]: value = "task-3509026" [ 1155.897979] env[69475]: _type = "Task" [ 1155.897979] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.898188] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce3b2216-0b8b-4d06-9ee3-dd64352be37a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.910059] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509026, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.911748] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1155.911925] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1155.912653] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40809597-9a2d-43a1-9f8e-504bd9346ce1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.917654] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1155.917654] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ba411f-1361-47ea-1065-a50e8fe64ec7" [ 1155.917654] env[69475]: _type = "Task" [ 1155.917654] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.925700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1155.925700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1155.925700] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1155.926629] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ba411f-1361-47ea-1065-a50e8fe64ec7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.024618] env[69475]: DEBUG oslo_vmware.api [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509025, 'name': PowerOnVM_Task, 'duration_secs': 0.397169} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.024884] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1156.025184] env[69475]: DEBUG nova.compute.manager [None req-47e76e11-fe5b-4bff-9976-121507eeee49 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1156.025952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f1d78e4-cad3-44a4-ad7f-0c6fb6eb63c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.101326] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b8c50d0a-4b3d-4b70-9bd6-8304fa128e59] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.235896] env[69475]: DEBUG oslo_concurrency.lockutils [req-b1cf6397-274f-4513-a141-c23a8acf2762 req-9a4f5b87-5563-4504-ba9c-0e8806623631 service nova] Releasing lock "refresh_cache-1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.356739] env[69475]: INFO nova.compute.resource_tracker [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating resource usage from migration 96c70506-3255-4191-9f79-d5b05c450614 [ 1156.410239] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509026, 'name': ReconfigVM_Task, 'duration_secs': 0.235916} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.413739] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Reconfigured VM instance instance-00000062 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1156.418669] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10182898-e266-44a6-93a0-8de873dae35b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.441206] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ba411f-1361-47ea-1065-a50e8fe64ec7, 'name': SearchDatastore_Task, 'duration_secs': 0.010623} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.443032] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1156.443032] env[69475]: value = "task-3509027" [ 1156.443032] env[69475]: _type = "Task" [ 1156.443032] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.443270] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58a0eb63-72fd-460a-92ed-84dacea3ccf0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.453311] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1156.453311] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522f1288-cb77-a955-af3f-8d0da1ad8630" [ 1156.453311] env[69475]: _type = "Task" [ 1156.453311] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.457323] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509027, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.469522] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522f1288-cb77-a955-af3f-8d0da1ad8630, 'name': SearchDatastore_Task, 'duration_secs': 0.011616} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.471013] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1156.471389] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77/1275b0c3-6d9f-48e5-acf0-2ee747b1bb77.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1156.472125] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1156.473965] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ce93d4e0-9357-4826-bd55-8356ac094a8d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.482723] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1156.482723] env[69475]: value = "task-3509028" [ 1156.482723] env[69475]: _type = "Task" [ 1156.482723] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.492196] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509028, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.590999] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56f9d7a-5acb-4ce4-950f-50515104371a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.598145] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f6fe207-0192-49a5-9f5c-e3c96dade20f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.603648] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 2c9b3182-fe70-4bb4-8ee3-0cacbff6d6fc] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.630823] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 4b17d080-594b-44e7-83aa-ebe0787722d9] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1156.636024] env[69475]: DEBUG nova.network.neutron [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", 
"ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1156.636024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0906e4f7-6a59-485d-ae08-ffd053a91a54 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.643219] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2ef5754-f21f-475c-94f9-5b951c25baf6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.658184] env[69475]: DEBUG nova.compute.provider_tree [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1156.901178] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.955536] env[69475]: DEBUG oslo_vmware.api [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509027, 'name': ReconfigVM_Task, 'duration_secs': 0.141338} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.955839] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701117', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'name': 'volume-53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '4f091501-351c-45b8-9f64-4d28d4623df8', 'attached_at': '', 'detached_at': '', 'volume_id': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a', 'serial': '53fb465d-7ef8-4cfc-bb5b-08b0d766b68a'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1156.992740] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509028, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47939} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.993036] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77/1275b0c3-6d9f-48e5-acf0-2ee747b1bb77.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1156.993498] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1156.993778] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-812b4d06-531a-4ac3-b4f8-caa7417b534b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.999901] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1156.999901] env[69475]: value = "task-3509029" [ 1156.999901] env[69475]: _type = "Task" [ 1156.999901] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.009353] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509029, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.058235] env[69475]: DEBUG nova.compute.manager [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-vif-plugged-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.058379] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.058606] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.058767] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.058930] env[69475]: DEBUG nova.compute.manager [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] No waiting events found dispatching network-vif-plugged-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1157.059107] env[69475]: WARNING nova.compute.manager [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received unexpected event network-vif-plugged-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c for instance with vm_state building and task_state spawning. [ 1157.059263] env[69475]: DEBUG nova.compute.manager [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.059410] env[69475]: DEBUG nova.compute.manager [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing instance network info cache due to event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1157.059567] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.139197] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 9c27dcc3-67df-46ea-947d-b2ecdaeeb003] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.141159] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1157.141447] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Instance network_info: |[{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1157.141890] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.142090] env[69475]: DEBUG nova.network.neutron [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.143104] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 
eadfee29-c7fc-4d33-8869-7ea8e753554c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:b4:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4954661-ff70-43dd-bc60-8cbca6b9cbfa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1157.150964] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1157.151820] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1157.152064] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5822a838-3874-47db-97ad-f0ad80720893 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.169007] env[69475]: DEBUG nova.scheduler.client.report [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.177599] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1157.177599] env[69475]: value = "task-3509030" [ 1157.177599] env[69475]: _type = "Task" [ 1157.177599] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.186473] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509030, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.367156] env[69475]: DEBUG nova.compute.manager [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.367390] env[69475]: DEBUG nova.compute.manager [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing instance network info cache due to event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1157.367644] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.367819] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.368023] env[69475]: DEBUG nova.network.neutron [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1157.513976] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509029, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059808} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.513976] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1157.513976] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c1bd3a0-2649-4afa-8a64-6c4b60d5eeec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.536167] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77/1275b0c3-6d9f-48e5-acf0-2ee747b1bb77.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1157.537933] env[69475]: DEBUG nova.objects.instance [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'flavor' on Instance uuid 4f091501-351c-45b8-9f64-4d28d4623df8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.539317] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e822ddb3-5b46-40be-8375-908929112972 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.563172] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1157.563172] env[69475]: value = "task-3509031" [ 1157.563172] env[69475]: _type = 
"Task" [ 1157.563172] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.573177] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509031, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.642740] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: f40aa0bb-af1d-4f8f-a906-f1c83307b465] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1157.674114] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.330s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.674114] env[69475]: INFO nova.compute.manager [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Migrating [ 1157.682904] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.564s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.683307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.685174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.784s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.685406] env[69475]: DEBUG nova.objects.instance [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'pci_requests' on Instance uuid e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.700183] env[69475]: DEBUG nova.objects.instance [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'numa_topology' on Instance uuid e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1157.710650] env[69475]: DEBUG oslo_vmware.api [-] Task: 
{'id': task-3509030, 'name': CreateVM_Task, 'duration_secs': 0.345509} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1157.711687] env[69475]: INFO nova.scheduler.client.report [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Deleted allocations for instance 1459221f-4c35-4a49-a8c0-f8b4ee3e2265 [ 1157.712586] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1157.716171] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1157.716171] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1157.716359] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1157.716981] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d14ca4b-388e-4e92-b2b5-7e68bcd0284d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.722500] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1157.722500] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f692cd-c775-4319-ec8f-fd047eb2b109" [ 1157.722500] env[69475]: _type = "Task" [ 1157.722500] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.730815] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f692cd-c775-4319-ec8f-fd047eb2b109, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.872954] env[69475]: DEBUG nova.network.neutron [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updated VIF entry in instance network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1157.873349] env[69475]: DEBUG nova.network.neutron [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.059294] env[69475]: DEBUG oslo_concurrency.lockutils [None req-90efe70b-d762-4e37-83bf-db930248ae00 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 2.821s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.078536] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509031, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.132186] env[69475]: DEBUG nova.network.neutron [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updated VIF entry in instance network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1158.132568] env[69475]: DEBUG nova.network.neutron [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.145549] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 0a65565c-c679-47e5-8606-832fe3876af6] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.205082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.205299] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.205482] env[69475]: DEBUG nova.network.neutron [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.206757] env[69475]: INFO nova.compute.claims [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1158.222149] env[69475]: DEBUG oslo_concurrency.lockutils [None req-992737ff-7013-40ac-a5e6-52ea1b5d9dde 
tempest-DeleteServersTestJSON-1823655983 tempest-DeleteServersTestJSON-1823655983-project-member] Lock "1459221f-4c35-4a49-a8c0-f8b4ee3e2265" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.477s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.233234] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f692cd-c775-4319-ec8f-fd047eb2b109, 'name': SearchDatastore_Task, 'duration_secs': 0.014901} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.233553] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.233793] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1158.234052] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.234214] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.234408] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.234892] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57ab2703-a407-4d2c-9110-863674cae0a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.243425] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.243626] env[69475]: DEBUG 
nova.virt.vmwareapi.vmops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1158.244378] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-309d1926-227a-4b47-86f5-483ffc3cb7d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.250136] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1158.250136] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523a352d-b396-4aa9-1cd5-b752d28dc0f7" [ 1158.250136] env[69475]: _type = "Task" [ 1158.250136] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.257895] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523a352d-b396-4aa9-1cd5-b752d28dc0f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.376560] env[69475]: DEBUG oslo_concurrency.lockutils [req-19e8efd5-eef8-4084-a937-a3dcdc37b866 req-18bb3e4e-6b0a-4bc6-9b9b-fda226f1e086 service nova] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.483085] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.483085] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.483085] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.483356] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.483356] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.485453] env[69475]: INFO nova.compute.manager [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Terminating instance [ 1158.573493] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509031, 'name': ReconfigVM_Task, 'duration_secs': 0.572997} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.573754] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77/1275b0c3-6d9f-48e5-acf0-2ee747b1bb77.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1158.574444] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-335f0cf4-6ed0-4089-97d2-74b59673fecd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.580376] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1158.580376] env[69475]: value = "task-3509033" [ 1158.580376] env[69475]: _type = "Task" [ 1158.580376] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.591268] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509033, 'name': Rename_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.637023] env[69475]: DEBUG oslo_concurrency.lockutils [req-ecdb383b-2fcf-4783-97af-be87d6163d7d req-454604c7-2c04-4864-b743-cf095c065951 service nova] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1158.649171] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b1b04eb9-ded6-4425-8a06-0c26c086a09b] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1158.760501] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523a352d-b396-4aa9-1cd5-b752d28dc0f7, 'name': SearchDatastore_Task, 'duration_secs': 0.009521} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.761309] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11ef4c3e-64a5-43b6-8226-b504e459f145 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.767028] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1158.767028] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f6f1f5-4283-35a0-873a-6be9dda01f88" [ 1158.767028] env[69475]: _type = "Task" [ 1158.767028] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.774085] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f6f1f5-4283-35a0-873a-6be9dda01f88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.958207] env[69475]: DEBUG nova.network.neutron [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.989568] env[69475]: DEBUG nova.compute.manager [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1158.989778] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1158.990660] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b36d38ff-df89-4d3a-9f41-cf53fdd0c045 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.999080] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1158.999080] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72a22c3b-e2ff-43d9-abc7-334ce37dcbb8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.005761] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1159.005761] env[69475]: value = "task-3509034" [ 1159.005761] env[69475]: _type = "Task" [ 1159.005761] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.013715] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509034, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.090260] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509033, 'name': Rename_Task, 'duration_secs': 0.250439} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.090532] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1159.090778] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-479e2ea2-a228-4adc-a063-a28f00650096 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.097445] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1159.097445] env[69475]: value = "task-3509035" [ 1159.097445] env[69475]: _type = "Task" [ 1159.097445] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.105106] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509035, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.153762] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: a3ee83aa-f753-49e3-9db2-b1b67d6d211e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.278887] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f6f1f5-4283-35a0-873a-6be9dda01f88, 'name': SearchDatastore_Task, 'duration_secs': 0.021682} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.279138] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.279633] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] eadfee29-c7fc-4d33-8869-7ea8e753554c/eadfee29-c7fc-4d33-8869-7ea8e753554c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1159.279633] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d0939aa-d642-4941-9712-59467e571f2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.288214] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1159.288214] env[69475]: value = "task-3509036" [ 1159.288214] env[69475]: _type = "Task" [ 1159.288214] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.297892] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509036, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.396018] env[69475]: DEBUG nova.compute.manager [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1159.396475] env[69475]: DEBUG nova.compute.manager [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing instance network info cache due to event network-changed-b2b04f22-0a1e-4c90-b84f-5d119fc7e528. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1159.396475] env[69475]: DEBUG oslo_concurrency.lockutils [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] Acquiring lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1159.396587] env[69475]: DEBUG oslo_concurrency.lockutils [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] Acquired lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1159.396721] env[69475]: DEBUG nova.network.neutron [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Refreshing network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1159.410118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a41742b5-da54-4053-9f77-c09c7831d3e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.417237] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbca2a6-cbd0-40ce-847a-61b4361da68c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.449323] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3f8162-6910-4be6-a9de-40979d25b20e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.456967] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-650c1f39-8d26-4842-8aff-048af72080ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.462026] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.472638] env[69475]: DEBUG nova.compute.provider_tree [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] 
Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.516124] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509034, 'name': PowerOffVM_Task, 'duration_secs': 0.225124} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.516418] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1159.516589] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1159.516852] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0056cf32-8e43-4e74-8bf6-9199493cedfb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.608698] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509035, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.613920] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1159.614174] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1159.614415] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore2] 4f091501-351c-45b8-9f64-4d28d4623df8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1159.614677] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9a33da6-8665-4698-9bb2-001e583d1a78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.622758] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1159.622758] env[69475]: value = "task-3509038" [ 1159.622758] env[69475]: _type = "Task" [ 1159.622758] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.631500] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509038, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.656608] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 82236043-3222-4134-8717-4c239ed12aba] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1159.801890] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472308} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.801890] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] eadfee29-c7fc-4d33-8869-7ea8e753554c/eadfee29-c7fc-4d33-8869-7ea8e753554c.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1159.801890] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1159.801890] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7bacde0-621c-4176-ac19-13eb384d7d4c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.807194] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1159.807194] env[69475]: value = "task-3509039" [ 1159.807194] env[69475]: _type = "Task" [ 1159.807194] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.815928] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509039, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.975927] env[69475]: DEBUG nova.scheduler.client.report [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1160.109652] env[69475]: DEBUG oslo_vmware.api [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509035, 'name': PowerOnVM_Task, 'duration_secs': 0.600537} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.109652] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1160.109864] env[69475]: INFO nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Took 7.35 seconds to spawn the instance on the hypervisor. [ 1160.109942] env[69475]: DEBUG nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1160.110688] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2618d6-21e8-4d09-af41-2ee8946ff588 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.119064] env[69475]: DEBUG nova.network.neutron [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updated VIF entry in instance network info cache for port b2b04f22-0a1e-4c90-b84f-5d119fc7e528. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1160.119376] env[69475]: DEBUG nova.network.neutron [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [{"id": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "address": "fa:16:3e:2e:52:22", "network": {"id": "a5ecc342-ff26-4d68-9810-30407b73463d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-941429832-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.184", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a68f54aa603f46468f50c83cd4fa3e8c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "84aee122-f630-43c5-9cc1-3a38d3819c82", "external-id": "nsx-vlan-transportzone-816", "segmentation_id": 816, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2b04f22-0a", "ovs_interfaceid": "b2b04f22-0a1e-4c90-b84f-5d119fc7e528", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1160.751969] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: eadfea6c-3fce-4f54-b889-d994d61ec14f] Instance has had 0 of 5 cleanup attempts {{(pid=69475) 
_run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1160.757484] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.072s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.766424] env[69475]: DEBUG oslo_concurrency.lockutils [req-119bc2be-dc37-49a2-9dd2-09dfa8648aaf req-1c751f15-3d33-420d-ad04-82ef79c2586f service nova] Releasing lock "refresh_cache-92020fc6-aff6-437f-9e26-a5b61ea7e76f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1160.769542] env[69475]: INFO nova.compute.manager [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Took 12.32 seconds to build instance. [ 1160.774382] env[69475]: DEBUG oslo_vmware.api [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509038, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256364} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.777307] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1160.777406] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1160.777592] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1160.777723] env[69475]: INFO nova.compute.manager [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Took 1.79 seconds to destroy the instance on the hypervisor. [ 1160.777955] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1160.778417] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509039, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142498} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.778652] env[69475]: DEBUG nova.compute.manager [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1160.778714] env[69475]: DEBUG nova.network.neutron [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1160.780262] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1160.781118] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f7ff9d-1dab-41cb-a408-cc9c9dd4d028 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.806576] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] eadfee29-c7fc-4d33-8869-7ea8e753554c/eadfee29-c7fc-4d33-8869-7ea8e753554c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.807498] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2072534d-3c70-4ce9-b3ab-9ddf638a79de {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.822804] env[69475]: INFO nova.network.neutron [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating port e27cfabc-cd13-4aaa-b9e1-eebffb18225e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1160.830424] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1160.830424] env[69475]: value = "task-3509040" [ 1160.830424] env[69475]: _type = "Task" [ 1160.830424] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.838184] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509040, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.262376] env[69475]: DEBUG nova.compute.manager [req-69f447bb-3727-4a69-aeaa-f0f174a43071 req-288f7112-6b20-4f05-a8e2-9d473526d4af service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Received event network-vif-deleted-fbde5d12-5376-4f30-a0eb-1e63c7d36242 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1161.262708] env[69475]: INFO nova.compute.manager [req-69f447bb-3727-4a69-aeaa-f0f174a43071 req-288f7112-6b20-4f05-a8e2-9d473526d4af service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Neutron deleted interface fbde5d12-5376-4f30-a0eb-1e63c7d36242; detaching it from the instance and deleting it from the info cache [ 1161.262991] env[69475]: DEBUG nova.network.neutron [req-69f447bb-3727-4a69-aeaa-f0f174a43071 req-288f7112-6b20-4f05-a8e2-9d473526d4af service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.268026] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 980bb0eb-121c-4703-a453-fb0b4351e9e3] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.271715] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833b7070-702f-4c81-86bc-31db69b99249 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.836s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.272927] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30158bce-aa1f-4e73-a61f-ef099a978caf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.311493] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1161.348871] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509040, 'name': ReconfigVM_Task, 'duration_secs': 0.272451} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.349515] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfigured VM instance instance-00000076 to attach disk [datastore2] eadfee29-c7fc-4d33-8869-7ea8e753554c/eadfee29-c7fc-4d33-8869-7ea8e753554c.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.350202] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e6e657a-95cc-4dd1-896e-251234255afa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.358632] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1161.358632] env[69475]: value = "task-3509041" [ 1161.358632] env[69475]: _type = "Task" [ 1161.358632] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.371462] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509041, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.601816] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.602099] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.602460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.602745] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.602932] env[69475]: DEBUG 
oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.605525] env[69475]: INFO nova.compute.manager [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Terminating instance [ 1161.742473] env[69475]: DEBUG nova.network.neutron [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1161.771345] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-160346f9-ff4d-4b9a-9caf-cdff998b1271 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.778972] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e8c2d21e-2e42-48de-928e-c5fd944899b6] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1161.784344] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4931bcac-3781-41d7-8374-22c1b376e8a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.826038] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.827081] env[69475]: DEBUG nova.compute.manager [req-69f447bb-3727-4a69-aeaa-f0f174a43071 req-288f7112-6b20-4f05-a8e2-9d473526d4af service nova] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Detach interface failed, port_id=fbde5d12-5376-4f30-a0eb-1e63c7d36242, reason: Instance 4f091501-351c-45b8-9f64-4d28d4623df8 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1161.828033] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcc89cee-5a5e-4d18-b218-84418ae47c43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.834252] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1161.834252] env[69475]: value = "task-3509042" [ 1161.834252] env[69475]: _type = "Task" [ 1161.834252] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.844870] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509042, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.869691] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509041, 'name': Rename_Task, 'duration_secs': 0.301141} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.870031] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.870334] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-87813139-709b-4e45-9061-bd8869bb64e0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.878315] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1161.878315] env[69475]: value = "task-3509043" [ 1161.878315] env[69475]: _type = "Task" [ 1161.878315] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.885297] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509043, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.110203] env[69475]: DEBUG nova.compute.manager [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1162.110487] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1162.111407] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6965d6e-3b77-41e0-9e3a-bc6a45611bf9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.122030] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1162.122411] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4f635a2-3ddf-4e2f-9da8-03468bfd48c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.128710] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1162.128710] env[69475]: value = "task-3509044" [ 1162.128710] env[69475]: _type = "Task" [ 1162.128710] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.137342] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509044, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.245310] env[69475]: INFO nova.compute.manager [-] [instance: 4f091501-351c-45b8-9f64-4d28d4623df8] Took 1.47 seconds to deallocate network for instance. [ 1162.300948] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: a87da6e4-d7ec-4624-94bc-b76ade04d511] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.346037] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509042, 'name': PowerOffVM_Task, 'duration_secs': 0.28316} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.346037] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.346261] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1162.390155] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509043, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.639944] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509044, 'name': PowerOffVM_Task, 'duration_secs': 0.213204} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.639944] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.639944] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.640302] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e08a514-e8e1-413c-81da-083d99b72e04 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.726460] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.726700] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.726925] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore2] 
1275b0c3-6d9f-48e5-acf0-2ee747b1bb77 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.727177] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d2450a6-1385-4465-a922-23c537146859 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.737603] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1162.737603] env[69475]: value = "task-3509046" [ 1162.737603] env[69475]: _type = "Task" [ 1162.737603] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.743868] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509046, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.752211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.752576] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.752880] env[69475]: DEBUG nova.objects.instance [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'resources' on Instance uuid 4f091501-351c-45b8-9f64-4d28d4623df8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1162.779334] env[69475]: DEBUG nova.compute.manager [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1162.779674] env[69475]: DEBUG oslo_concurrency.lockutils [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.781390] env[69475]: DEBUG oslo_concurrency.lockutils [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.781885] env[69475]: DEBUG oslo_concurrency.lockutils [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.781885] env[69475]: DEBUG nova.compute.manager [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] No waiting events found dispatching network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1162.782074] env[69475]: WARNING nova.compute.manager [req-6465a31e-16af-4c32-80d8-652bb64d47e8 req-b95cab55-343b-4938-a6ee-14de868b5970 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received unexpected event network-vif-plugged-e27cfabc-cd13-4aaa-b9e1-eebffb18225e for instance with vm_state shelved_offloaded and task_state spawning. [ 1162.807356] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 619a87e7-097c-41af-8452-5437b82e7ebe] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1162.855183] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1162.855617] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1162.855793] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1162.855987] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1162.856292] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 
0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1162.856597] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1162.856834] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1162.857071] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1162.857527] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1162.857761] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1162.858014] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1162.866027] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53750178-c95c-48c4-8b76-4161d38214b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.883837] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1162.883837] env[69475]: value = "task-3509047" [ 1162.883837] env[69475]: _type = "Task" [ 1162.883837] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.887364] env[69475]: DEBUG oslo_vmware.api [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509043, 'name': PowerOnVM_Task, 'duration_secs': 0.565481} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.890561] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1162.890914] env[69475]: INFO nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Took 7.83 seconds to spawn the instance on the hypervisor. [ 1162.891211] env[69475]: DEBUG nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1162.892158] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7297b8c-e87c-494c-a9db-f607bbe03928 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.899987] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509047, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.960874] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.960927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.961157] env[69475]: DEBUG nova.network.neutron [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1163.245278] env[69475]: DEBUG oslo_vmware.api [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509046, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.384722} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.245432] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1163.245550] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1163.245728] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1163.245904] env[69475]: INFO nova.compute.manager [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1163.246163] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1163.246384] env[69475]: DEBUG nova.compute.manager [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1163.246505] env[69475]: DEBUG nova.network.neutron [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1163.310540] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 2ade2ed6-4725-4913-8ac4-14a96ced3e4b] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.407047] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509047, 'name': ReconfigVM_Task, 'duration_secs': 0.253991} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.407047] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1163.424355] env[69475]: INFO nova.compute.manager [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Took 14.46 seconds to build instance. [ 1163.536872] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6103ba75-5650-4928-a539-c8081a7198f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.546023] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2571a73-f543-47a2-83e4-5ac72ffb64e3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.588049] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21461186-85dd-4cdc-8d45-3082735d697d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.593996] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a79363e-5196-4b92-82dc-91ae21939b1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.609504] env[69475]: DEBUG nova.compute.provider_tree [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1163.794932] env[69475]: DEBUG nova.network.neutron [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", 
"segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1163.817047] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 56f0e59a-1c37-4977-81dc-da1a274ce7e7] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1163.918934] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1163.919234] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1163.919517] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1163.919712] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1163.919859] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1163.920011] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1163.920226] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1163.920394] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1163.920627] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1163.920800] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1163.920972] env[69475]: DEBUG nova.virt.hardware [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1163.926572] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1163.927032] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9973a375-e747-4bab-8251-449c415a73c7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.978s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.927260] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90f6953b-f4f4-40f5-888d-3c5bce1e33cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.948769] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1163.948769] env[69475]: value = "task-3509048" [ 1163.948769] env[69475]: _type = "Task" [ 1163.948769] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.957068] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509048, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.114124] env[69475]: DEBUG nova.scheduler.client.report [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1164.118851] env[69475]: DEBUG nova.network.neutron [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.298289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.321096] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: f9ba40e5-4053-462e-a0d8-6b0d3dd56ad0] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.330402] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='4ab42bbc06554c30643974c404a9e908',container_format='bare',created_at=2025-04-22T09:44:25Z,direct_url=,disk_format='vmdk',id=6b1310bb-4147-4b4b-9e96-dde2c9000c1d,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-2106101923-shelved',owner='572bc56741e24d57a4d01f202c8fb78d',properties=ImageMetaProps,protected=,size=31667712,status='active',tags=,updated_at=2025-04-22T09:44:42Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1164.330669] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1164.330826] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1164.331013] env[69475]: DEBUG 
nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1164.331778] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1164.331946] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1164.332174] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1164.332341] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1164.332512] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1164.332673] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1164.332844] env[69475]: DEBUG nova.virt.hardware [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1164.333735] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4755fb98-21e4-43f4-b166-f54fd864a07f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.344163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38464098-8531-403b-9db9-7c5d48fb0732 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.361873] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: 
e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:f8:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54495d8d-2696-4f65-b925-e567abdc205f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e27cfabc-cd13-4aaa-b9e1-eebffb18225e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1164.372345] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1164.373929] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1164.373929] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-185ea746-8e77-4562-89c7-1b49ec78aef9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.392641] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1164.392641] env[69475]: value = "task-3509049" [ 1164.392641] env[69475]: _type = "Task" [ 1164.392641] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.403739] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509049, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.459221] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509048, 'name': ReconfigVM_Task, 'duration_secs': 0.224992} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.459221] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1164.459431] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1599fa-ae77-4d9a-aaa0-3033188b28c4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.485527] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1164.485842] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8a629b2-1890-4a4d-b711-36ce92b2553a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.504110] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1164.504110] env[69475]: value = "task-3509050" [ 1164.504110] env[69475]: _type = "Task" [ 1164.504110] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.518122] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509050, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.620020] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.623022] env[69475]: INFO nova.compute.manager [-] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Took 1.38 seconds to deallocate network for instance. 
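The repeated "Waiting for the task ... progress is N% ... completed successfully" entries around this point come from wait_for_task polling the vCenter tasks the driver starts (ReconfigVM_Task, CreateVM_Task, SearchDatastore_Task). The sketch below is a minimal, self-contained illustration of that poll-until-terminal-state pattern; the TaskInfo/read_task_info names are hypothetical stand-ins for illustration and are not the actual oslo.vmware API.

```python
# Minimal sketch of the poll-until-done pattern seen in the log
# ("Waiting for the task ... progress is N% ... completed successfully").
# TaskInfo and read_task_info are hypothetical stand-ins, not the real
# oslo.vmware classes or call signatures.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str            # 'queued' | 'running' | 'success' | 'error'
    progress: int         # 0-100, as reported by the task
    error: str | None = None


def wait_for_task(read_task_info, poll_interval=0.5, timeout=300.0):
    """Poll a vCenter-style task until it reaches a terminal state."""
    deadline = time.monotonic() + timeout
    while True:
        info = read_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        if time.monotonic() >= deadline:
            raise TimeoutError("task did not complete in time")
        # Mirrors the periodic "progress is N%" DEBUG lines in the log.
        print(f"progress is {info.progress}%")
        time.sleep(poll_interval)
```

In the real service the equivalent loop is driven by oslo.vmware on a periodic looping call rather than a blocking sleep, which is why each poll shows up as a separate _poll_task DEBUG entry in the log above.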
[ 1164.651072] env[69475]: INFO nova.scheduler.client.report [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocations for instance 4f091501-351c-45b8-9f64-4d28d4623df8 [ 1164.824256] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 86647493-8b2c-46bd-94d3-c973e843f778] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1164.831057] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.831556] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing instance network info cache due to event network-changed-e27cfabc-cd13-4aaa-b9e1-eebffb18225e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1164.831556] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquiring lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.831684] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquired lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.831850] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Refreshing network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1164.907160] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509049, 'name': CreateVM_Task, 'duration_secs': 0.402275} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.907160] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1164.907160] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.907160] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1164.907160] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1164.907160] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a94b89e-9760-41eb-825e-0f95b443625e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.911317] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1164.911317] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ac4cbd-8a95-ee44-029d-bbc352ff143b" [ 1164.911317] env[69475]: _type = "Task" [ 1164.911317] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.921975] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52ac4cbd-8a95-ee44-029d-bbc352ff143b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.014537] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509050, 'name': ReconfigVM_Task, 'duration_secs': 0.445454} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.014849] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1165.015390] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1165.131802] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.132112] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.132334] env[69475]: DEBUG nova.objects.instance [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.161377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-062aad24-9ddb-4e12-a8d4-7db0b32fec0e tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "4f091501-351c-45b8-9f64-4d28d4623df8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.679s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.328666] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8cc0636c-84af-4f68-bec8-1493b421a605] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.427069] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1165.427069] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Processing image 6b1310bb-4147-4b4b-9e96-dde2c9000c1d {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1165.427211] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.427356] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquired lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.427531] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1165.427811] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-933b799e-8d96-4985-ad83-425b370c8de5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.441049] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1165.441243] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1165.442055] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d7d712f-97e5-44bc-ad99-e038c6f99d94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.448435] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1165.448435] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52320f9c-bbbd-d417-29f4-67ccf5c4981a" [ 1165.448435] env[69475]: _type = "Task" [ 1165.448435] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.456496] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52320f9c-bbbd-d417-29f4-67ccf5c4981a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.522847] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5ce5c7-1f12-47fc-be92-e386a1850573 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.552820] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0ff294-d76f-47a1-b6c1-903de4fc250d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.577721] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1165.833205] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: e960f967-d693-4ea8-9390-8b0232941c58] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1165.852053] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updated VIF entry in instance network info cache for port e27cfabc-cd13-4aaa-b9e1-eebffb18225e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1165.852422] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [{"id": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "address": "fa:16:3e:9f:f8:e1", "network": {"id": "62083b30-e966-4802-8960-367f1137a879", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-79648352-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.185", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "572bc56741e24d57a4d01f202c8fb78d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54495d8d-2696-4f65-b925-e567abdc205f", "external-id": "nsx-vlan-transportzone-220", "segmentation_id": 220, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape27cfabc-cd", "ovs_interfaceid": "e27cfabc-cd13-4aaa-b9e1-eebffb18225e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.867069] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0205702d-0fb2-4c7a-b04c-d8ef7a9aeed3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.877472] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504b0ecc-23be-4255-9da6-cdbfcc399b1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.917534] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e03fd6-ddf8-4f87-b5dc-c2520f86d1a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.926816] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d44dcf87-4f71-4fb0-8c16-966f504122e7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.941253] env[69475]: DEBUG nova.compute.provider_tree [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1165.959042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1165.959352] env[69475]: 
DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Fetch image to [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b/OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1165.959506] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Downloading stream optimized image 6b1310bb-4147-4b4b-9e96-dde2c9000c1d to [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b/OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b.vmdk on the data store datastore2 as vApp {{(pid=69475) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1165.959678] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Downloading image file data 6b1310bb-4147-4b4b-9e96-dde2c9000c1d to the ESX as VM named 'OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b' {{(pid=69475) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1165.974228] env[69475]: DEBUG nova.compute.manager [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1165.974454] env[69475]: DEBUG nova.compute.manager [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing instance network info cache due to event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1165.974738] env[69475]: DEBUG oslo_concurrency.lockutils [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.974887] env[69475]: DEBUG oslo_concurrency.lockutils [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.975174] env[69475]: DEBUG nova.network.neutron [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1166.039893] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1166.039893] env[69475]: value = "resgroup-9" [ 1166.039893] env[69475]: _type = "ResourcePool" [ 1166.039893] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1166.040198] env[69475]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-de75f1f3-93d9-4720-bf3a-abc3fdfb2e2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.061192] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease: (returnval){ [ 1166.061192] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1166.061192] env[69475]: _type = "HttpNfcLease" [ 1166.061192] env[69475]: } obtained for vApp import into resource pool (val){ [ 1166.061192] env[69475]: value = "resgroup-9" [ 1166.061192] env[69475]: _type = "ResourcePool" [ 1166.061192] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1166.061511] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the lease: (returnval){ [ 1166.061511] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1166.061511] env[69475]: _type = "HttpNfcLease" [ 1166.061511] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1166.070091] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1166.070091] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1166.070091] env[69475]: _type = "HttpNfcLease" [ 1166.070091] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1166.336758] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 2b0cc71c-862e-4eb0-afc4-b2125003b087] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.355869] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Releasing lock "refresh_cache-e10a197a-a9b7-43ce-b8a8-ce186619feb9" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.356135] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77] Received event network-vif-deleted-edf63788-66d9-4b3f-9e14-312bd4b1312f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.356357] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.356544] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1166.356756] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.356895] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.357076] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1166.412316] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.412564] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock 
"b6a785b0-7ae8-4856-b5a8-e017cfd376d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.444065] env[69475]: DEBUG nova.scheduler.client.report [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.570612] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1166.570612] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1166.570612] env[69475]: _type = "HttpNfcLease" [ 1166.570612] env[69475]: } is initializing. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1166.690021] env[69475]: DEBUG nova.network.neutron [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updated VIF entry in instance network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1166.690021] env[69475]: DEBUG nova.network.neutron [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.840238] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 86464a01-e034-43b6-a6d5-45f9e3b6715b] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1166.915706] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1166.952061] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.817s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.975729] env[69475]: INFO nova.scheduler.client.report [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance 1275b0c3-6d9f-48e5-acf0-2ee747b1bb77 [ 1167.069902] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1167.069902] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1167.069902] env[69475]: _type = "HttpNfcLease" [ 1167.069902] env[69475]: } is ready. {{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1167.070283] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1167.070283] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52af5ad1-a7bf-01a1-1338-ffabc17f8acb" [ 1167.070283] env[69475]: _type = "HttpNfcLease" [ 1167.070283] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1167.070948] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dee8fd9-84c2-414d-ac7a-79beb8547264 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.079446] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1167.079618] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating HTTP connection to write to file with size = 31667712 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk. 
{{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1167.081371] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.081688] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.137465] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.137662] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1167.137825] env[69475]: DEBUG nova.compute.manager [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing instance network info cache due to event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1167.137999] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1167.144211] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-db6157ae-c242-41c7-8a3f-1da49d76b79c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.194028] env[69475]: DEBUG oslo_concurrency.lockutils [req-5a49f6f7-adaa-4e22-8734-ac8038796eef req-8d01c5b1-9124-40f7-9eb4-462f1fcb40d1 service nova] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.194384] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1167.194580] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1167.227389] env[69475]: DEBUG nova.network.neutron [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Port 81121438-ec92-4519-97f1-e2a871109623 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1167.343774] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 8bea34ef-0caf-4cdb-a689-dd747d9b52ea] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.435756] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.436048] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.437579] env[69475]: INFO nova.compute.claims [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Claim successful on node 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1167.487129] env[69475]: DEBUG oslo_concurrency.lockutils [None req-98bf161c-ef56-4abe-a9a6-39815e30891c tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "1275b0c3-6d9f-48e5-acf0-2ee747b1bb77" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.885s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.849093] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: baf27027-678d-4167-bb9b-df410aeb0e82] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1167.919591] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updated VIF entry in instance network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1167.919966] env[69475]: DEBUG nova.network.neutron [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.004574] env[69475]: DEBUG nova.compute.manager [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.004574] env[69475]: DEBUG nova.compute.manager [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1168.004708] env[69475]: DEBUG oslo_concurrency.lockutils [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1168.004879] env[69475]: DEBUG oslo_concurrency.lockutils [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1168.004990] env[69475]: DEBUG nova.network.neutron [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1168.215888] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1168.216121] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1168.217163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedcfb59-7201-445a-b91a-6b7a5dc55851 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.224089] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1168.224257] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1168.224485] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-fcf6fd5c-cdd4-404c-a694-95843048868c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.251399] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.251399] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.251497] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.354611] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 4b3b53d1-82bf-40e7-9988-af7b51e9883a] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1168.422377] env[69475]: DEBUG oslo_concurrency.lockutils [req-1a9fd6b3-dbce-4a24-abcc-6c437a434719 req-ff189659-6ff3-43df-a6b7-c4b26e7879e2 service nova] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1168.615890] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64e6a67a-eac5-4b6d-b8c9-ab4d300108d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.623868] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9323feaa-2e0e-49a4-b44c-e4eba878a5fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.656347] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619b6e60-b7c0-47ca-b036-9c19151ab3b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.663789] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262915e6-1b65-46d7-85e2-b182b39e8ebd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.678437] env[69475]: DEBUG nova.compute.provider_tree [None 
req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1168.689554] env[69475]: DEBUG oslo_vmware.rw_handles [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/525acfa0-57a7-84de-8f03-5c97b8a70916/disk-0.vmdk. {{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1168.690254] env[69475]: INFO nova.virt.vmwareapi.images [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Downloaded image file data 6b1310bb-4147-4b4b-9e96-dde2c9000c1d [ 1168.690722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e7a121-1a22-4299-86cf-448d2f8d287c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.709155] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8cfac9df-1e5e-4a9b-bf08-cc6eae834287 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.757345] env[69475]: DEBUG nova.network.neutron [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1168.757446] env[69475]: DEBUG nova.network.neutron [req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.807839] env[69475]: INFO nova.virt.vmwareapi.images [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] The imported VM was unregistered [ 1168.810104] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1168.810348] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Creating directory with path [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1168.810621] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e977aab9-b14d-4136-8a6d-b02f0edb3fdc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.826083] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Created directory with path [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1168.826271] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 
tempest-AttachVolumeShelveTestJSON-386882206-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b/OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b.vmdk to [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk. {{(pid=69475) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1168.826512] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-54146da0-9fa1-476b-abee-efbedd4547d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.832272] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1168.832272] env[69475]: value = "task-3509053" [ 1168.832272] env[69475]: _type = "Task" [ 1168.832272] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.840249] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.858044] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 7cb0c166-0cd1-4af3-9ad4-4d4f857eedc9] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.161674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1169.162052] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1169.182071] env[69475]: DEBUG nova.scheduler.client.report [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1169.259519] env[69475]: DEBUG oslo_concurrency.lockutils 
[req-9b58c13d-ab39-4b9f-b5ca-8b2d7e7afbd1 req-c6427430-5bc2-4725-bd97-f9143ba32c1e service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1169.288933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1169.289153] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1169.289337] env[69475]: DEBUG nova.network.neutron [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1169.344672] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.361458] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: b41845c6-46bd-4b3b-ab26-d7d2dad08f84] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1169.664697] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1169.686806] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.251s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1169.687370] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1169.844236] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.865617] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: d1e5e08d-b41a-4655-997d-91fbd3581f00] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.041784] env[69475]: DEBUG nova.network.neutron [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.189329] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.189329] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.190881] env[69475]: INFO nova.compute.claims [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1170.194554] env[69475]: DEBUG nova.compute.utils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1170.196081] env[69475]: DEBUG nova.compute.manager [None 
req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1170.196266] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1170.235016] env[69475]: DEBUG nova.policy [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba09f56e4fda4fc99602796a0af6cb33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87670cfd2b848af98507a5ebf9fab51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1170.344573] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.369951] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: a21ec73a-2658-4fc6-9bc1-0e492385d59e] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1170.524716] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Successfully created port: cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1170.544901] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1170.699961] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1170.845262] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task} progress is 88%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.873030] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 235653ac-a893-4f42-a394-dd81f61f0d73] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.055556] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b27ccb-3e66-4cc1-bbf1-90de6f73f93b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.062366] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e15545-dd30-472e-accc-fa7073c6f5d1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.081255] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.081508] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.346817] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509053, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.287119} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.346817] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b/OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b.vmdk to [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk. 
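Annotation: the entries above trace the VMware image-cache path for instance e10a197a-a9b7-43ce-b8a8-ce186619feb9: the streamOptimized image was written through an HTTP NFC lease, the temporary import VM was unregistered, a cache directory was created under devstack-image-cache_base, and MoveVirtualDisk_Task relocated the VMDK into the cache before a later copy into the instance folder. The following is a minimal, schematic Python sketch of that fetch-if-missing flow; every helper callable (disk_exists, download_to_temp, move_disk, copy_disk, delete_path) is a hypothetical stand-in passed in by the caller, not Nova's real API, and the real logic lives in nova.virt.vmwareapi.vmops._fetch_image_if_missing.

from dataclasses import dataclass


@dataclass
class DatastorePath:
    """Datastore-relative path, e.g. '[datastore2] devstack-image-cache_base/<id>/<id>.vmdk'."""
    datastore: str
    path: str

    def __str__(self) -> str:
        return f"[{self.datastore}] {self.path}"


def fetch_image_if_missing(image_id, instance_uuid, datastore,
                           disk_exists, download_to_temp, move_disk,
                           copy_disk, delete_path):
    # Shared, per-image cache location on the datastore.
    cache_vmdk = DatastorePath(
        datastore, f"devstack-image-cache_base/{image_id}/{image_id}.vmdk")

    if not disk_exists(cache_vmdk):
        # 1. Stream the image into a temporary import location
        #    (the OSTACK_IMG_* directory seen in the log); the helper is
        #    assumed to return a DatastorePath to the imported VMDK.
        tmp_vmdk = download_to_temp(image_id, datastore)
        # 2. Move the imported disk into the image cache (MoveVirtualDisk_Task),
        #    then delete the now-empty temporary import directory.
        move_disk(tmp_vmdk, cache_vmdk)
        delete_path(DatastorePath(datastore, tmp_vmdk.path.split("/")[0]))

    # 3. Copy from the cache into the instance folder (CopyVirtualDisk_Task);
    #    later instances booted from the same image reuse the cached copy.
    instance_vmdk = DatastorePath(
        datastore, f"{instance_uuid}/{instance_uuid}.vmdk")
    copy_disk(cache_vmdk, instance_vmdk)
    return instance_vmdk

In the log these three steps correspond to task-3509053 (MoveVirtualDisk_Task), task-3509054 (DeleteDatastoreFile_Task) and task-3509055 (CopyVirtualDisk_Task), each polled via _poll_task until the task reports completion.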
[ 1171.346960] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Cleaning up location [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1171.347079] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_34069e0e-35b5-4542-a963-6022c860b01b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.347332] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cec95b11-8836-4d6a-9b7c-1fc70c313ebb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.354089] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1171.354089] env[69475]: value = "task-3509054" [ 1171.354089] env[69475]: _type = "Task" [ 1171.354089] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.362656] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509054, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.376282] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 93607154-f135-4925-9c3a-a97051535b00] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.396229] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef53108b-cbc0-4f6c-bfc2-3fa44a239b87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.404411] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2087015-a096-4523-af9a-953c344148bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.437213] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a230b6a-d322-4335-b692-f4e9ea246ec0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.444929] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46fed32-a5b6-494f-bd18-e74790d5f22b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.458405] env[69475]: DEBUG nova.compute.provider_tree [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1171.585492] env[69475]: DEBUG nova.compute.utils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1171.718062] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1171.745382] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1171.745657] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1171.745819] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1171.746011] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1171.746166] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1171.746317] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1171.746600] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1171.746769] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1171.746942] env[69475]: DEBUG 
nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1171.747123] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1171.747302] env[69475]: DEBUG nova.virt.hardware [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1171.748169] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2654a2fb-9cf8-4384-8d32-dd6f1f40f341 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.756580] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61e8eea-553f-4c10-944f-8bc188770b35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.863580] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509054, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032434} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.863840] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1171.864013] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Releasing lock "[datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1171.864269] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk to [datastore2] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1171.864518] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9b6a24b3-6e0a-4082-94ae-ad4ced3b0fd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.871196] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1171.871196] env[69475]: value = "task-3509055" [ 1171.871196] env[69475]: _type = "Task" [ 1171.871196] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.878551] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.878941] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 7be48799-ea4a-4e7f-95c2-637460596cfc] Instance has had 0 of 5 cleanup attempts {{(pid=69475) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1171.935411] env[69475]: DEBUG nova.compute.manager [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Received event network-vif-plugged-cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1171.935537] env[69475]: DEBUG oslo_concurrency.lockutils [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] Acquiring lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.935750] env[69475]: DEBUG oslo_concurrency.lockutils [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.935913] env[69475]: DEBUG oslo_concurrency.lockutils [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.936098] env[69475]: DEBUG nova.compute.manager [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] No waiting events found dispatching network-vif-plugged-cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1171.936268] env[69475]: WARNING nova.compute.manager [req-24e19f5e-5ba3-4e79-912e-ac5e4da712f9 req-6b10fa4d-7e2e-46aa-ab92-ffed5fdee9c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Received unexpected event network-vif-plugged-cc57d7c3-7051-4e4b-95c6-c1bffe25471e for instance with vm_state building and task_state spawning. 
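Annotation: the network-vif-plugged entries above show the expected-event handshake between Neutron and the compute manager. Neutron reported port cc57d7c3-7051-4e4b-95c6-c1bffe25471e as plugged, the manager acquired the per-instance events lock, found no registered waiter, and logged the WARNING because the instance is still building/spawning. Below is a highly simplified sketch of that pattern, assuming a threading.Event per (instance, event) pair; the names are illustrative and do not mirror nova.compute.manager.InstanceEvents exactly.

import threading
from collections import defaultdict

# instance_uuid -> {event_name: threading.Event}
_waiters = defaultdict(dict)
_lock = threading.Lock()


def prepare_for_event(instance_uuid, event_name):
    """Register interest before plugging a VIF, so the driver can block on it."""
    ev = threading.Event()
    with _lock:
        _waiters[instance_uuid][event_name] = ev
    return ev


def external_instance_event(instance_uuid, event_name):
    """Handle a Neutron notification such as network-vif-plugged-<port-id>."""
    with _lock:
        ev = _waiters[instance_uuid].pop(event_name, None)
    if ev is None:
        # No waiter registered yet: this is the "No waiting events found
        # dispatching ..." / "Received unexpected event" case seen above.
        return False
    ev.set()   # Wake the thread blocked in prepare_for_event().wait()
    return True

During spawn the port is often reported active before the driver starts waiting, so the warning is usually harmless; the subsequent refresh_cache-b6a785b0... entries show the manager picking the port state up again when it rebuilds the instance network info cache.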
[ 1171.961988] env[69475]: DEBUG nova.scheduler.client.report [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.026143] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Successfully updated port: cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1172.090035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.191593] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e71bbfcc-8864-49d7-8067-d8826f07abe8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.214388] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e58f18-0452-4bda-ad2d-747f9a64799c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.222251] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1172.382525] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.382853] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.383050] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Cleaning up deleted instances with incomplete migration {{(pid=69475) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1172.466933] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.467537] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1172.530480] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.530480] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.530480] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1172.729340] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1172.729669] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9b2b225-4e41-48f6-9ffa-0d14e94b2d9e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.737644] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1172.737644] env[69475]: value = "task-3509056" [ 1172.737644] env[69475]: _type = "Task" [ 1172.737644] 
env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1172.746509] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509056, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.882275] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.886063] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.973024] env[69475]: DEBUG nova.compute.utils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1172.975411] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Allocating IP information in the background. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1172.975623] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1173.038061] env[69475]: DEBUG nova.policy [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '50223677b1b84004ad2ae335882b0bf2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '52941494ff1643f6bb75cc1320a86b88', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1173.065821] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1173.165883] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.166221] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.166741] env[69475]: INFO nova.compute.manager [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Attaching volume 3aa644aa-04af-438a-bdbf-5764402265a2 to /dev/sdb [ 1173.201960] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bc892ea-2e7d-4378-b49a-816b49565d1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.210866] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83e1447b-1e99-46bb-acbb-e76526484784 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.214535] env[69475]: DEBUG nova.network.neutron [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updating instance_info_cache with network_info: [{"id": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "address": "fa:16:3e:42:17:b5", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc57d7c3-70", "ovs_interfaceid": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.227828] env[69475]: DEBUG nova.virt.block_device [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 
tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating existing volume attachment record: 143f16ac-3cb5-4761-b93e-76fcd0209e9c {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1173.249800] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509056, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.329373] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Successfully created port: fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1173.382204] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.480781] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1173.718990] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1173.719423] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Instance network_info: |[{"id": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "address": "fa:16:3e:42:17:b5", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc57d7c3-70", "ovs_interfaceid": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1173.720425] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:17:b5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc57d7c3-7051-4e4b-95c6-c1bffe25471e', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1173.730498] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1173.730755] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1173.730996] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d932a86-7417-4f0a-af58-ab1d7775c308 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.754876] env[69475]: DEBUG oslo_vmware.api [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509056, 'name': PowerOnVM_Task, 'duration_secs': 0.600173} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1173.756290] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1173.756513] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3fe8094a-6ec9-4018-a3b1-db03189dc2dc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance '460d4b93-b18a-4965-9e2b-8c6175ccc91f' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1173.760802] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1173.760802] env[69475]: value = "task-3509058" [ 1173.760802] env[69475]: _type = "Task" [ 1173.760802] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1173.769413] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509058, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1173.884883] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.076720] env[69475]: DEBUG nova.compute.manager [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Received event network-changed-cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1174.076720] env[69475]: DEBUG nova.compute.manager [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Refreshing instance network info cache due to event network-changed-cc57d7c3-7051-4e4b-95c6-c1bffe25471e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1174.076720] env[69475]: DEBUG oslo_concurrency.lockutils [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] Acquiring lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1174.076720] env[69475]: DEBUG oslo_concurrency.lockutils [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] Acquired lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1174.076991] env[69475]: DEBUG nova.network.neutron [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Refreshing network info cache for port cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1174.275413] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509058, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.385635] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509055, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.421069} completed successfully. 
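The repeated "_poll_task ... progress is N%" entries and the matching "completed successfully" records with a duration_secs value reflect a poll-until-terminal loop around each vCenter task. The sketch below is illustrative only; the actual loop is in the cited oslo_vmware/api.py, and get_task_info and POLL_INTERVAL here are hypothetical stand-ins that do not call the vSphere API.

```python
# Illustrative sketch of a vCenter-style task poll loop (assumed shape,
# not oslo_vmware's code). get_task_info is a caller-supplied callable.
import time

POLL_INTERVAL = 0.5  # seconds between polls (assumed value)

def wait_for_task(get_task_info, task_id):
    """Poll a task until it reaches a terminal state, reporting progress."""
    start = time.time()
    while True:
        info = get_task_info(task_id)  # e.g. {'state': 'running', 'progress': 60}
        if info["state"] == "success":
            info["duration_secs"] = time.time() - start  # reported on completion above
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        # corresponds to the periodic "progress is N%" debug lines
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(POLL_INTERVAL)
```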
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.385898] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/6b1310bb-4147-4b4b-9e96-dde2c9000c1d/6b1310bb-4147-4b4b-9e96-dde2c9000c1d.vmdk to [datastore2] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1174.386683] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f1e9320-b5b5-4c24-9108-d221b1028575 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.409128] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1174.409376] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b6da28f-a8ee-491a-9f05-789e570078d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.427898] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1174.427898] env[69475]: value = "task-3509059" [ 1174.427898] env[69475]: _type = "Task" [ 1174.427898] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.435150] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.494368] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1174.519985] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1174.520311] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1174.520489] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1174.520691] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1174.520846] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1174.521013] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1174.521242] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1174.521417] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1174.521603] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 
tempest-ServersTestJSON-711529090-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1174.521771] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1174.521960] env[69475]: DEBUG nova.virt.hardware [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1174.522861] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fbd1df6-87bf-4065-b054-5d158657627d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.530732] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a5f40f-52f9-4877-afa1-d5b4534870c1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.770182] env[69475]: DEBUG nova.network.neutron [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updated VIF entry in instance network info cache for port cc57d7c3-7051-4e4b-95c6-c1bffe25471e. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1174.770549] env[69475]: DEBUG nova.network.neutron [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updating instance_info_cache with network_info: [{"id": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "address": "fa:16:3e:42:17:b5", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc57d7c3-70", "ovs_interfaceid": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1174.777621] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509058, 'name': CreateVM_Task} progress is 99%. 
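The hardware-topology entries above (flavor m1.nano with vcpus=1, limits and preferences of 0:0:0, maxima of 65536) end with a single possible topology, VirtCPUTopology(cores=1,sockets=1,threads=1). The short runnable sketch below illustrates that enumeration; it is not the code in nova/virt/hardware.py, just the factorization the log describes.

```python
# Illustrative sketch: enumerate (sockets, cores, threads) splits of vcpus
# under per-dimension maxima. For vcpus=1 this yields exactly one topology,
# matching the "Got 1 possible topologies" entry above.
from collections import namedtuple

Topology = namedtuple("Topology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    topos = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    topos.append(Topology(sockets, cores, threads))
    return topos

print(possible_topologies(1))  # [Topology(sockets=1, cores=1, threads=1)]
```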
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1174.814737] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Successfully updated port: fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1174.937727] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509059, 'name': ReconfigVM_Task, 'duration_secs': 0.254861} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1174.939024] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Reconfigured VM instance instance-00000068 to attach disk [datastore2] e10a197a-a9b7-43ce-b8a8-ce186619feb9/e10a197a-a9b7-43ce-b8a8-ce186619feb9.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1174.939024] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d376e921-9cbe-4546-8e4c-325a226e4e8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.944851] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1174.944851] env[69475]: value = "task-3509060" [ 1174.944851] env[69475]: _type = "Task" [ 1174.944851] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1174.952967] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509060, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.276583] env[69475]: DEBUG oslo_concurrency.lockutils [req-a2e350a5-7750-42ca-9e91-3930f6b6ddde req-e25e0e98-1f51-43c3-8c8a-653858d3c7c3 service nova] Releasing lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1175.277012] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509058, 'name': CreateVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.317794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.317794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.317794] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.453722] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509060, 'name': Rename_Task, 'duration_secs': 0.138503} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.453997] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1175.454250] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2b03629-c67f-47eb-8b3d-acb926ef7e8c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.462172] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1175.462172] env[69475]: value = "task-3509061" [ 1175.462172] env[69475]: _type = "Task" [ 1175.462172] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.468533] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509061, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.777892] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509058, 'name': CreateVM_Task, 'duration_secs': 1.536214} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.778082] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1175.778757] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.778927] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.779268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1175.779515] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d97c55f1-5de3-414e-9e94-be4b590251f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1175.784061] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1175.784061] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521bb8f5-b9c6-4619-e278-0740d1b5c246" [ 1175.784061] env[69475]: _type = "Task" [ 1175.784061] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1175.792081] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521bb8f5-b9c6-4619-e278-0740d1b5c246, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1175.852443] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1175.968646] env[69475]: DEBUG oslo_vmware.api [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509061, 'name': PowerOnVM_Task, 'duration_secs': 0.460052} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1175.968992] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1175.999968] env[69475]: DEBUG nova.network.neutron [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Updating instance_info_cache with network_info: [{"id": "fa01b86b-5802-422e-b736-87395230d146", "address": "fa:16:3e:55:49:ed", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa01b86b-58", "ovs_interfaceid": "fa01b86b-5802-422e-b736-87395230d146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.079709] env[69475]: DEBUG nova.compute.manager [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1176.080639] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4affc6fc-23b9-4ec4-8ec0-1d041b9fe9e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.106074] env[69475]: DEBUG nova.compute.manager [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Received event network-vif-plugged-fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.106322] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] Acquiring lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.106519] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] 
Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.106686] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.106850] env[69475]: DEBUG nova.compute.manager [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] No waiting events found dispatching network-vif-plugged-fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1176.107036] env[69475]: WARNING nova.compute.manager [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Received unexpected event network-vif-plugged-fa01b86b-5802-422e-b736-87395230d146 for instance with vm_state building and task_state spawning. [ 1176.107185] env[69475]: DEBUG nova.compute.manager [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Received event network-changed-fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1176.107338] env[69475]: DEBUG nova.compute.manager [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Refreshing instance network info cache due to event network-changed-fa01b86b-5802-422e-b736-87395230d146. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1176.107502] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] Acquiring lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.294539] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521bb8f5-b9c6-4619-e278-0740d1b5c246, 'name': SearchDatastore_Task, 'duration_secs': 0.009585} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.294830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.295073] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1176.295312] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.295497] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.295706] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1176.295961] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac978a75-1734-4369-9668-81d092b5b3cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.304269] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1176.304452] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1176.305158] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67a1fcb2-23c8-4683-a773-41046c7266a1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.310182] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1176.310182] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e07a94-49f7-534b-5c5f-54b02c4210ed" [ 1176.310182] env[69475]: _type = "Task" [ 1176.310182] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.318284] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e07a94-49f7-534b-5c5f-54b02c4210ed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.503323] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.503700] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Instance network_info: |[{"id": "fa01b86b-5802-422e-b736-87395230d146", "address": "fa:16:3e:55:49:ed", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa01b86b-58", "ovs_interfaceid": "fa01b86b-5802-422e-b736-87395230d146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1176.504037] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] Acquired lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.504238] env[69475]: DEBUG nova.network.neutron [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Refreshing network info cache for port fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1176.505679] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:49:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19598cc1-e105-4565-906a-09dde75e3fbe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fa01b86b-5802-422e-b736-87395230d146', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1176.513491] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1176.516420] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1176.516912] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-232938e9-a8d7-4af0-a6b4-1db8b8d37184 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.537804] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1176.537804] env[69475]: value = "task-3509063" [ 1176.537804] env[69475]: _type = "Task" [ 1176.537804] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.546730] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509063, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1176.597072] env[69475]: DEBUG oslo_concurrency.lockutils [None req-484230f6-dc1c-4300-baef-b0961bb41a31 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.720s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.611596] env[69475]: DEBUG nova.network.neutron [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Port 81121438-ec92-4519-97f1-e2a871109623 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1176.612091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1176.612091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1176.612282] env[69475]: DEBUG nova.network.neutron [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1176.734755] env[69475]: DEBUG nova.network.neutron [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Updated VIF entry in instance network info cache for port fa01b86b-5802-422e-b736-87395230d146. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1176.735044] env[69475]: DEBUG nova.network.neutron [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Updating instance_info_cache with network_info: [{"id": "fa01b86b-5802-422e-b736-87395230d146", "address": "fa:16:3e:55:49:ed", "network": {"id": "db87e30f-4049-4761-b39b-e1f9d0971d14", "bridge": "br-int", "label": "tempest-ServersTestJSON-1989063716-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "52941494ff1643f6bb75cc1320a86b88", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19598cc1-e105-4565-906a-09dde75e3fbe", "external-id": "nsx-vlan-transportzone-371", "segmentation_id": 371, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfa01b86b-58", "ovs_interfaceid": "fa01b86b-5802-422e-b736-87395230d146", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.821838] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e07a94-49f7-534b-5c5f-54b02c4210ed, 'name': SearchDatastore_Task, 'duration_secs': 0.008384} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1176.822671] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bff23050-e3e3-400b-a5e7-ed77b4b465b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.829033] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1176.829033] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817e12-1088-d3d5-befa-3abb5b22bb0d" [ 1176.829033] env[69475]: _type = "Task" [ 1176.829033] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1176.835949] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817e12-1088-d3d5-befa-3abb5b22bb0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.048357] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509063, 'name': CreateVM_Task} progress is 25%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.191039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1177.191039] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.191039] env[69475]: DEBUG nova.objects.instance [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid 579b4d3e-bd76-4f5d-b972-7b289bca04a0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.238833] env[69475]: DEBUG oslo_concurrency.lockutils [req-a5e0c4f9-16bc-4f8c-9fdb-c95eef0128c9 req-4894c864-a2d6-48cb-b42b-21e185a4be0c service nova] Releasing lock "refresh_cache-123426f0-207f-4a57-8211-8fd4e8ea9daf" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.319397] env[69475]: DEBUG nova.network.neutron [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1177.339364] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 
tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52817e12-1088-d3d5-befa-3abb5b22bb0d, 'name': SearchDatastore_Task, 'duration_secs': 0.009123} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.339654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.339864] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b6a785b0-7ae8-4856-b5a8-e017cfd376d8/b6a785b0-7ae8-4856-b5a8-e017cfd376d8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1177.341058] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8f2b793-20db-4dc1-b25d-f1cec9f08a01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.348298] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1177.348298] env[69475]: value = "task-3509064" [ 1177.348298] env[69475]: _type = "Task" [ 1177.348298] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.356287] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509064, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.548315] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509063, 'name': CreateVM_Task, 'duration_secs': 0.662711} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.548482] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1177.549165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1177.549336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1177.549652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1177.549916] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dbe56a2-6093-4ced-be81-afb24e5dfcc2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.554572] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1177.554572] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524cf07d-47b7-7bb3-cd8c-1c92877193cf" [ 1177.554572] env[69475]: _type = "Task" [ 1177.554572] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.562159] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524cf07d-47b7-7bb3-cd8c-1c92877193cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.780752] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1177.780990] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701146', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'name': 'volume-3aa644aa-04af-438a-bdbf-5764402265a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6960992f-a4dd-4a5d-abb8-ff7ae8a414b8', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'serial': '3aa644aa-04af-438a-bdbf-5764402265a2'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1177.781889] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2d079b-d049-4a5c-9891-b67734a1025e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.799470] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca121db-e3ed-4f91-91ac-258fdd580c73 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.826867] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-3aa644aa-04af-438a-bdbf-5764402265a2/volume-3aa644aa-04af-438a-bdbf-5764402265a2.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1177.828040] env[69475]: DEBUG nova.objects.instance [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid 579b4d3e-bd76-4f5d-b972-7b289bca04a0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.829792] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1177.831048] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c72ed3c1-fa17-4b10-9df4-5421429c1933 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.847625] env[69475]: DEBUG nova.compute.manager [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69475) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1177.849688] env[69475]: 
DEBUG nova.objects.base [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance<579b4d3e-bd76-4f5d-b972-7b289bca04a0> lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1177.849893] env[69475]: DEBUG nova.network.neutron [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1177.861340] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1177.861340] env[69475]: value = "task-3509065" [ 1177.861340] env[69475]: _type = "Task" [ 1177.861340] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.868146] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509064, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.873649] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509065, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.922140] env[69475]: DEBUG nova.policy [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1178.065988] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524cf07d-47b7-7bb3-cd8c-1c92877193cf, 'name': SearchDatastore_Task, 'duration_secs': 0.009704} completed successfully. 
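The volume-attach entries above log the vmdk connection_info and then a ReconfigVM_Task that adds the disk "[datastore1] volume-<id>/volume-<id>.vmdk" with type thin. The fully local sketch below just parses that connection_info shape and derives the reconfigure parameters seen in the log; it talks to nothing, and the datastore name and path format are taken directly from the entries above rather than computed.

```python
# Illustrative parse of the connection_info logged for the vmdk attach.
# Field names and values are copied from the log entry; nothing here
# contacts vCenter or implements volumeops.attach_volume.
connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-701146",  # shadow-VM moref backing the Cinder volume
        "volume_id": "3aa644aa-04af-438a-bdbf-5764402265a2",
        "name": "volume-3aa644aa-04af-438a-bdbf-5764402265a2",
        "access_mode": "rw",
    },
}

data = connection_info["data"]
vmdk_path = f"[datastore1] {data['name']}/{data['name']}.vmdk"  # path seen in the ReconfigVM entry
print("attach", vmdk_path, "type=thin", "mode=" + data["access_mode"])
```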
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.066317] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1178.066586] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1178.067182] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1178.067182] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1178.067331] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1178.067507] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfc8a047-a70c-4095-943e-a7337e4b57e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.076047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1178.076249] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1178.077056] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-59c2251e-c6e1-4f1d-bc34-351568bac76f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.082220] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1178.082220] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a54fe-98ad-b7a1-0ba1-c6c381bd03bd" [ 1178.082220] env[69475]: _type = "Task" [ 1178.082220] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.089891] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a54fe-98ad-b7a1-0ba1-c6c381bd03bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.362775] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509064, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640979} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.365658] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] b6a785b0-7ae8-4856-b5a8-e017cfd376d8/b6a785b0-7ae8-4856-b5a8-e017cfd376d8.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1178.365863] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1178.366111] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fce29b6b-537a-40f1-b21a-4cdacb679268 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.372689] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509065, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.373782] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1178.373782] env[69475]: value = "task-3509066" [ 1178.373782] env[69475]: _type = "Task" [ 1178.373782] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.380801] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509066, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.592085] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525a54fe-98ad-b7a1-0ba1-c6c381bd03bd, 'name': SearchDatastore_Task, 'duration_secs': 0.008451} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.592847] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19b2d31b-6970-49e6-8941-f521ff652b46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.597949] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1178.597949] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522052f2-075c-1226-71e5-06498db58b41" [ 1178.597949] env[69475]: _type = "Task" [ 1178.597949] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.605073] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522052f2-075c-1226-71e5-06498db58b41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.872569] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509065, 'name': ReconfigVM_Task, 'duration_secs': 0.766574} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.872849] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-3aa644aa-04af-438a-bdbf-5764402265a2/volume-3aa644aa-04af-438a-bdbf-5764402265a2.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1178.877653] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d56548af-9c9a-4d4f-ace3-661425ec2f9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.899113] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509066, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067489} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.900035] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1178.900420] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1178.900420] env[69475]: value = "task-3509067" [ 1178.900420] env[69475]: _type = "Task" [ 1178.900420] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.901084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdaa186-8b8b-4170-bb9e-e748074d0387 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.912550] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509067, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.930159] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] b6a785b0-7ae8-4856-b5a8-e017cfd376d8/b6a785b0-7ae8-4856-b5a8-e017cfd376d8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1178.930444] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-35f6b312-28fa-460e-b48d-4ac5aab4b123 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.951064] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1178.951064] env[69475]: value = "task-3509068" [ 1178.951064] env[69475]: _type = "Task" [ 1178.951064] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.956485] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1178.956779] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.961361] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509068, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.109155] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522052f2-075c-1226-71e5-06498db58b41, 'name': SearchDatastore_Task, 'duration_secs': 0.009793} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.109539] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1179.109659] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 123426f0-207f-4a57-8211-8fd4e8ea9daf/123426f0-207f-4a57-8211-8fd4e8ea9daf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1179.109930] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ce07bf0-b575-4d68-8687-47ecd4777781 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.116473] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1179.116473] env[69475]: value = "task-3509069" [ 1179.116473] env[69475]: _type = "Task" [ 1179.116473] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.124168] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509069, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.285677] env[69475]: DEBUG nova.compute.manager [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1179.285985] env[69475]: DEBUG oslo_concurrency.lockutils [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.286163] env[69475]: DEBUG oslo_concurrency.lockutils [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1179.286350] env[69475]: DEBUG oslo_concurrency.lockutils [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.286515] env[69475]: DEBUG nova.compute.manager [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] No waiting events found dispatching network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1179.286701] env[69475]: WARNING nova.compute.manager [req-4f32748e-a19b-4a2b-b0e9-aafa50a4d67e req-2eb51dd3-6721-46c9-bad5-81dbf5a3a766 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received unexpected event network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 for instance with vm_state active and task_state None. [ 1179.389915] env[69475]: DEBUG nova.network.neutron [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Successfully updated port: 1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1179.416971] env[69475]: DEBUG oslo_vmware.api [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509067, 'name': ReconfigVM_Task, 'duration_secs': 0.200323} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.417307] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701146', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'name': 'volume-3aa644aa-04af-438a-bdbf-5764402265a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6960992f-a4dd-4a5d-abb8-ff7ae8a414b8', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'serial': '3aa644aa-04af-438a-bdbf-5764402265a2'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1179.461656] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509068, 'name': ReconfigVM_Task, 'duration_secs': 0.499865} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.462166] env[69475]: DEBUG nova.objects.instance [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'migration_context' on Instance uuid 460d4b93-b18a-4965-9e2b-8c6175ccc91f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1179.463538] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Reconfigured VM instance instance-00000077 to attach disk [datastore1] b6a785b0-7ae8-4856-b5a8-e017cfd376d8/b6a785b0-7ae8-4856-b5a8-e017cfd376d8.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1179.464811] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e9a866a-ffc8-4880-a143-5fff2bf6bfac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.473050] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1179.473050] env[69475]: value = "task-3509070" [ 1179.473050] env[69475]: _type = "Task" [ 1179.473050] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.483290] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509070, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.628929] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509069, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.893297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1179.893297] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1179.893297] env[69475]: DEBUG nova.network.neutron [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1179.985495] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509070, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.127520] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509069, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57259} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.127816] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 123426f0-207f-4a57-8211-8fd4e8ea9daf/123426f0-207f-4a57-8211-8fd4e8ea9daf.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1180.127984] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1180.128241] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b3d9e26b-715d-4c29-97c0-496257167f3f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.133387] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20053b61-e2fc-44f4-8cd0-e9346be30e14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.136834] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1180.136834] env[69475]: value = "task-3509071" [ 1180.136834] env[69475]: _type = "Task" [ 1180.136834] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.142926] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a32f24-d51c-40d3-8a15-7e25b0a4f592 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.149111] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509071, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.176310] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09f39731-37a5-4799-9d39-c33ac06d7411 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.183640] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-187ab457-cd14-4712-a6e7-fda04a5531a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.196691] env[69475]: DEBUG nova.compute.provider_tree [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.430646] env[69475]: WARNING nova.network.neutron [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. ignoring it [ 1180.455585] env[69475]: DEBUG nova.objects.instance [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1180.485787] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509070, 'name': Rename_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.647971] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068866} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.648351] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1180.649180] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-111535fa-ddfb-47ca-bcfb-3e71e120c07d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.673136] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 123426f0-207f-4a57-8211-8fd4e8ea9daf/123426f0-207f-4a57-8211-8fd4e8ea9daf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1180.673458] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be9f0b8d-f828-4ba5-8f1d-955488e7869b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.693164] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1180.693164] env[69475]: value = "task-3509072" [ 1180.693164] env[69475]: _type = "Task" [ 1180.693164] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.697123] env[69475]: DEBUG nova.network.neutron [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "address": "fa:16:3e:26:86:b5", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef95bec-a8", "ovs_interfaceid": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1180.699921] env[69475]: DEBUG nova.scheduler.client.report [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.706075] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509072, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.960841] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f12b4666-6d08-48f4-9d06-a3d4e1ef1787 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.795s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1180.986427] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509070, 'name': Rename_Task, 'duration_secs': 1.232573} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.986711] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.986953] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1aedb9b0-248c-4d63-a57d-4c38990e48d0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.994262] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1180.994262] env[69475]: value = "task-3509073" [ 1180.994262] env[69475]: _type = "Task" [ 1180.994262] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.002491] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509073, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.200489] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1181.201328] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.201510] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.205646] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb455351-b605-43a8-aad5-523e1a70d798 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.208273] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509072, 'name': ReconfigVM_Task, 'duration_secs': 0.461421} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.212146] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 123426f0-207f-4a57-8211-8fd4e8ea9daf/123426f0-207f-4a57-8211-8fd4e8ea9daf.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1181.213460] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23793088-ce74-41f6-8224-d58587e25b5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.227494] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1181.227722] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1181.227880] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1181.228074] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1181.228224] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1181.228394] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1181.228610] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 
tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1181.228793] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1181.228920] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1181.229109] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1181.229285] env[69475]: DEBUG nova.virt.hardware [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1181.236051] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfiguring VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1181.239201] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-71e8b8bc-5cee-4e94-abf5-3df941dd34a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.251747] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1181.251747] env[69475]: value = "task-3509074" [ 1181.251747] env[69475]: _type = "Task" [ 1181.251747] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.262579] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1181.262579] env[69475]: value = "task-3509075" [ 1181.262579] env[69475]: _type = "Task" [ 1181.262579] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.267722] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509074, 'name': Rename_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.279139] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509075, 'name': ReconfigVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.488856] env[69475]: DEBUG nova.compute.manager [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1181.489105] env[69475]: DEBUG nova.compute.manager [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-1ef95bec-a8fb-4ee7-b99a-299bf62af225. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1181.489340] env[69475]: DEBUG oslo_concurrency.lockutils [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1181.489477] env[69475]: DEBUG oslo_concurrency.lockutils [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1181.490008] env[69475]: DEBUG nova.network.neutron [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port 1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1181.505965] env[69475]: DEBUG oslo_vmware.api [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509073, 'name': PowerOnVM_Task, 'duration_secs': 0.488129} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.506250] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1181.506445] env[69475]: INFO nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Took 9.79 seconds to spawn the instance on the hypervisor. 
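[annotation, not part of the original log] The recurring "Waiting for the task: (returnval){ value = "task-NNNNNNN" ... }" and "Task: {...} progress is N%." entries above are produced by oslo.vmware's task polling (wait_for_task / _poll_task in oslo_vmware/api.py, the paths shown in the log). The following is a minimal sketch of how a caller drives that loop; the vCenter host, credentials, the positional argument order for VMwareAPISession, and the moref value are placeholders or assumptions, not taken from this deployment.

    # Sketch only: illustrates the invoke_api()/wait_for_task() pattern visible in the log.
    from oslo_vmware import api, vim_util

    # Assumed positional order: host, username, password, api_retry_count, task_poll_interval.
    session = api.VMwareAPISession(
        'vcenter.example.test',            # placeholder vCenter endpoint
        'administrator@vsphere.local',     # placeholder user
        'secret',                          # placeholder password
        10,                                # retry count (assumed)
        0.5)                               # poll interval in seconds (assumed)

    # Build a managed object reference from a bare moref value; 'vm-701146' is reused
    # from the connection info above purely as an example value.
    vm_ref = vim_util.get_moref('vm-701146', 'VirtualMachine')

    # invoke_api() issues the SOAP call (e.g. PowerOnVM_Task, ReconfigVM_Task) and returns
    # a task moref; wait_for_task() polls it until success or raises on error. Each poll
    # iteration is what emits the "progress is N%" / "completed successfully" lines above.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)

The poll interval trades log verbosity against latency: a short interval, as in this run, yields many intermediate "progress" records for a single ReconfigVM_Task or CopyVirtualDisk_Task before the final "completed successfully" entry.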
[ 1181.506658] env[69475]: DEBUG nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1181.508221] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258a3235-08cc-4a73-bb0e-0b1716a54d46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.715391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.758s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.760530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.760831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.768563] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509074, 'name': Rename_Task, 'duration_secs': 0.162902} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1181.771674] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1181.772571] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-67826103-5905-476f-9621-c0e70395d3ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.780665] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509075, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1181.782293] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1181.782293] env[69475]: value = "task-3509076" [ 1181.782293] env[69475]: _type = "Task" [ 1181.782293] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1181.793366] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509076, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.029913] env[69475]: INFO nova.compute.manager [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Took 14.61 seconds to build instance. [ 1182.184340] env[69475]: DEBUG nova.network.neutron [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port 1ef95bec-a8fb-4ee7-b99a-299bf62af225. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1182.184788] env[69475]: DEBUG nova.network.neutron [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "address": "fa:16:3e:26:86:b5", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], 
"version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef95bec-a8", "ovs_interfaceid": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1182.264030] env[69475]: INFO nova.compute.manager [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Detaching volume 3aa644aa-04af-438a-bdbf-5764402265a2 [ 1182.282508] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509075, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.291303] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509076, 'name': PowerOnVM_Task} progress is 71%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.299868] env[69475]: INFO nova.virt.block_device [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Attempting to driver detach volume 3aa644aa-04af-438a-bdbf-5764402265a2 from mountpoint /dev/sdb [ 1182.300104] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1182.300303] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701146', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'name': 'volume-3aa644aa-04af-438a-bdbf-5764402265a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6960992f-a4dd-4a5d-abb8-ff7ae8a414b8', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'serial': '3aa644aa-04af-438a-bdbf-5764402265a2'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1182.301367] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25ecc36-546f-492e-a45c-6ea50691c898 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.323182] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce801a0b-9c27-4a66-b385-3b2cfe484319 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.330349] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86506a84-ec6f-405d-a7da-bf23571ea702 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.351515] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4cf7202-4ffa-4e0b-bfb9-52219b9169f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.366426] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] The volume has not been displaced from its original location: [datastore1] volume-3aa644aa-04af-438a-bdbf-5764402265a2/volume-3aa644aa-04af-438a-bdbf-5764402265a2.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1182.371772] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1182.372074] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0dd09af3-f79f-4b61-aaaa-c549781077a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.391400] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1182.391400] env[69475]: value = "task-3509077" [ 1182.391400] env[69475]: _type = "Task" [ 1182.391400] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.400126] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509077, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.528620] env[69475]: DEBUG nova.compute.manager [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1182.532818] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ef8841ae-2c98-41eb-8493-83af8370d6a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.119s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.688368] env[69475]: DEBUG oslo_concurrency.lockutils [req-995cb894-2da3-4b5b-b3b1-b3d4aca6e322 req-49de49ff-6167-4540-bfb9-8f9a2bf6240c service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1182.784113] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509075, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.794198] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509076, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1182.904836] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509077, 'name': ReconfigVM_Task, 'duration_secs': 0.325864} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1182.905138] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1182.909919] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-604457eb-899c-44f1-95d7-0f3c48ffbcd3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1182.926556] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1182.926556] env[69475]: value = "task-3509078" [ 1182.926556] env[69475]: _type = "Task" [ 1182.926556] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1182.935062] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509078, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1183.052343] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.052757] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.252636] env[69475]: INFO nova.compute.manager [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Swapping old allocation on dict_keys(['dd221100-68c1-4a75-92b5-b24d81fee5da']) held by migration 96c70506-3255-4191-9f79-d5b05c450614 for instance [ 1183.276613] env[69475]: DEBUG nova.scheduler.client.report [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Overwriting current allocation {'allocations': {'dd221100-68c1-4a75-92b5-b24d81fee5da': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 171}}, 'project_id': 'ca5098b4aae94c08b3f8ffd66aae2e2c', 'user_id': 'a123051be3624b50ab42a4254f687767', 'consumer_generation': 1} on consumer 460d4b93-b18a-4965-9e2b-8c6175ccc91f {{(pid=69475) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1183.284626] env[69475]: DEBUG oslo_vmware.api [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509075, 'name': ReconfigVM_Task, 'duration_secs': 1.630854} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.287781] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1183.287998] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfigured VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1183.296174] env[69475]: DEBUG oslo_vmware.api [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509076, 'name': PowerOnVM_Task, 'duration_secs': 1.336176} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.296174] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1183.296174] env[69475]: INFO nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Took 8.80 seconds to spawn the instance on the hypervisor. [ 1183.296174] env[69475]: DEBUG nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1183.296958] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6da01d2b-3e0e-440d-ba9e-2f3352e4d483 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.315393] env[69475]: DEBUG nova.compute.manager [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Received event network-changed-cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.315642] env[69475]: DEBUG nova.compute.manager [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Refreshing instance network info cache due to event network-changed-cc57d7c3-7051-4e4b-95c6-c1bffe25471e. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1183.315870] env[69475]: DEBUG oslo_concurrency.lockutils [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] Acquiring lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.316051] env[69475]: DEBUG oslo_concurrency.lockutils [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] Acquired lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.316177] env[69475]: DEBUG nova.network.neutron [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Refreshing network info cache for port cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1183.360611] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1183.360806] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1183.360987] env[69475]: DEBUG nova.network.neutron [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1183.437115] env[69475]: DEBUG oslo_vmware.api [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509078, 'name': ReconfigVM_Task, 'duration_secs': 0.209472} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1183.437420] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701146', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'name': 'volume-3aa644aa-04af-438a-bdbf-5764402265a2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6960992f-a4dd-4a5d-abb8-ff7ae8a414b8', 'attached_at': '', 'detached_at': '', 'volume_id': '3aa644aa-04af-438a-bdbf-5764402265a2', 'serial': '3aa644aa-04af-438a-bdbf-5764402265a2'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1183.558145] env[69475]: INFO nova.compute.claims [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1183.793029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-494c6a25-b1bf-408d-99ff-cc465c0189d7 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.602s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.814257] env[69475]: INFO nova.compute.manager [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Took 13.64 seconds to build instance. [ 1183.989331] env[69475]: DEBUG nova.objects.instance [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1184.064892] env[69475]: INFO nova.compute.resource_tracker [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating resource usage from migration 64adaee0-7956-4547-b9fa-ad36031552dd [ 1184.083136] env[69475]: DEBUG nova.network.neutron [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updated VIF entry in instance network info cache for port cc57d7c3-7051-4e4b-95c6-c1bffe25471e. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1184.083600] env[69475]: DEBUG nova.network.neutron [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updating instance_info_cache with network_info: [{"id": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "address": "fa:16:3e:42:17:b5", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc57d7c3-70", "ovs_interfaceid": "cc57d7c3-7051-4e4b-95c6-c1bffe25471e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.300366] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d88a27-7906-47e5-83c8-7b0e4711b5ac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.309766] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f69cc3a-5f3c-4387-b383-2d019008b11d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.316451] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2997a8ad-1036-4e52-8177-bab69de7a274 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.154s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1184.350633] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4673d02-c17b-44f2-a403-ddb327956cf3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.359559] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c897ccf-cfb2-4485-a667-d7bc8ab69fa0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.374748] env[69475]: DEBUG nova.compute.provider_tree [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.456226] 
env[69475]: DEBUG nova.network.neutron [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [{"id": "81121438-ec92-4519-97f1-e2a871109623", "address": "fa:16:3e:98:9c:8a", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81121438-ec", "ovs_interfaceid": "81121438-ec92-4519-97f1-e2a871109623", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.593213] env[69475]: DEBUG oslo_concurrency.lockutils [req-94f012f0-2fad-484a-acda-740a34ae3a38 req-059bf3a9-5d8d-4875-b4db-c68fb507c97f service nova] Releasing lock "refresh_cache-b6a785b0-7ae8-4856-b5a8-e017cfd376d8" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.878077] env[69475]: DEBUG nova.scheduler.client.report [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.959310] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-460d4b93-b18a-4965-9e2b-8c6175ccc91f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1184.960267] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7f846b2-656d-47b1-b2c8-f7140f2f4c69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.968279] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e536c6-8d29-46d9-8adf-34f8ee753916 {{(pid=69475) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.998286] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1e5f2acd-a3a3-460e-bba1-7c78416334c1 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.237s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.281579] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.281826] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.282017] env[69475]: DEBUG nova.compute.manager [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1185.282906] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3405448a-0711-4dc0-9982-d9d21c823c2c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.290445] env[69475]: DEBUG nova.compute.manager [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1185.291119] env[69475]: DEBUG nova.objects.instance [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'flavor' on Instance uuid 123426f0-207f-4a57-8211-8fd4e8ea9daf {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1185.383629] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.331s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.383933] env[69475]: INFO nova.compute.manager [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Migrating [ 1185.441147] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.441147] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.448372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.448658] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.448876] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.449075] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.449658] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.451299] env[69475]: INFO nova.compute.manager [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Terminating instance [ 
1185.900286] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.900503] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.900690] env[69475]: DEBUG nova.network.neutron [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1185.944391] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.944609] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.945607] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0540bdb5-ed01-41da-ae20-d56c3d3f90a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.966454] env[69475]: DEBUG nova.compute.manager [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1185.966708] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.967643] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0909e7-cabb-4a66-82d1-1139ee57f00f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.970937] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acf7d52-97d2-4c29-b073-6e33930b0a94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.000772] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfiguring VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1186.001272] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.001708] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11fabe87-7b0f-409c-affa-41981b0dc061 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.014786] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d28e0821-2fa2-4271-bc8d-9835f6ee1c65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.022794] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1186.022794] env[69475]: value = "task-3509081" [ 1186.022794] env[69475]: _type = "Task" [ 1186.022794] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.024150] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1186.024150] env[69475]: value = "task-3509080" [ 1186.024150] env[69475]: _type = "Task" [ 1186.024150] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.035675] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.038842] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509080, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.052633] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.052936] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67b62078-dae7-4bed-a060-b129b2e89ec4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.060617] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1186.060617] env[69475]: value = "task-3509082" [ 1186.060617] env[69475]: _type = "Task" [ 1186.060617] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.069863] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509082, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.299335] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1186.299673] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c7e1a36a-a993-4be8-80b9-3b4a0787e227 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.309460] env[69475]: DEBUG oslo_vmware.api [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1186.309460] env[69475]: value = "task-3509083" [ 1186.309460] env[69475]: _type = "Task" [ 1186.309460] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.317994] env[69475]: DEBUG oslo_vmware.api [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509083, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.538415] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.541481] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509080, 'name': PowerOffVM_Task, 'duration_secs': 0.182219} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.541733] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1186.541900] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1186.542212] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b768593-cf6c-493b-8eef-bae84a021c0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.571010] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509082, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.614075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.614393] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.614492] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleting the datastore file [datastore1] 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.614765] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f37b5511-f514-4e7e-a5a1-e86e7cfbf330 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.620674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.620899] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.623590] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1186.623590] env[69475]: value = "task-3509085" [ 1186.623590] env[69475]: _type = "Task" [ 1186.623590] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.632849] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509085, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.670547] env[69475]: DEBUG nova.network.neutron [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.826111] env[69475]: DEBUG oslo_vmware.api [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509083, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.035379] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.072659] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509082, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.126089] env[69475]: DEBUG nova.compute.utils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1187.138299] env[69475]: DEBUG oslo_vmware.api [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.1357} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.138572] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1187.138810] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1187.139031] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1187.139216] env[69475]: INFO nova.compute.manager [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1187.139461] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1187.139685] env[69475]: DEBUG nova.compute.manager [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1187.139825] env[69475]: DEBUG nova.network.neutron [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1187.173330] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1187.321242] env[69475]: DEBUG oslo_vmware.api [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509083, 'name': PowerOffVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.537199] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.573434] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509082, 'name': PowerOffVM_Task, 'duration_secs': 1.361249} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.573981] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.574944] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1187.575307] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1187.575763] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1187.576054] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1187.576289] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1187.576556] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1187.576900] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1187.577173] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1187.577456] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1187.577688] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1187.577936] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1187.583827] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb1767b7-f366-4d6b-baa7-eebc50dc1c16 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.601630] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1187.601630] env[69475]: value = "task-3509087" [ 1187.601630] env[69475]: _type = "Task" [ 1187.601630] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1187.611322] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509087, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.632653] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1187.824520] env[69475]: DEBUG oslo_vmware.api [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509083, 'name': PowerOffVM_Task, 'duration_secs': 1.12249} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.824520] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1187.824520] env[69475]: DEBUG nova.compute.manager [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1187.824781] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1b8a1f-33c3-4e82-91d5-dae1741ca017 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.863991] env[69475]: DEBUG nova.compute.manager [req-dcafd7fd-8ece-42f0-bf26-d6630da5c8b2 req-ba02b6d2-34b8-4bdc-999e-b5e196398547 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Received event network-vif-deleted-f181f990-1cef-4b68-ae07-ea93c380f5a0 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1187.864291] env[69475]: INFO nova.compute.manager [req-dcafd7fd-8ece-42f0-bf26-d6630da5c8b2 req-ba02b6d2-34b8-4bdc-999e-b5e196398547 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Neutron deleted interface f181f990-1cef-4b68-ae07-ea93c380f5a0; detaching it from the instance and deleting it from the info cache [ 1187.864472] env[69475]: DEBUG nova.network.neutron [req-dcafd7fd-8ece-42f0-bf26-d6630da5c8b2 req-ba02b6d2-34b8-4bdc-999e-b5e196398547 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.037797] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.113665] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509087, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.158199] env[69475]: DEBUG nova.network.neutron [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1188.340547] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fc51a7f7-18fb-4b00-b100-38df63528745 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 3.059s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1188.367882] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea19bf48-6671-403f-beef-3cba2cd2bfc9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.378144] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1b7bf5-9653-43c3-b7b0-553b146f8fbd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.415668] env[69475]: DEBUG nova.compute.manager [req-dcafd7fd-8ece-42f0-bf26-d6630da5c8b2 req-ba02b6d2-34b8-4bdc-999e-b5e196398547 service nova] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Detach interface failed, port_id=f181f990-1cef-4b68-ae07-ea93c380f5a0, reason: Instance 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1188.539141] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.612116] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509087, 'name': ReconfigVM_Task, 'duration_secs': 0.609832} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.612589] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd3172e-90e5-499d-baa0-0916d8fde812 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.633733] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1188.633998] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1188.634180] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1188.634367] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1188.634519] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1188.634670] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1188.634880] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1188.635054] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1188.635228] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1188.635394] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1188.635596] env[69475]: DEBUG nova.virt.hardware [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1188.636371] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96c1eb8d-1df8-46ff-9f4d-9b0cff3a0aec {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.643227] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1188.643227] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5213f3c7-cfd1-fe48-066d-224b7092ef47" [ 1188.643227] env[69475]: _type = "Task" [ 1188.643227] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.655258] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5213f3c7-cfd1-fe48-066d-224b7092ef47, 'name': SearchDatastore_Task, 'duration_secs': 0.008366} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.660815] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1188.661306] env[69475]: INFO nova.compute.manager [-] [instance: 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8] Took 1.52 seconds to deallocate network for instance. 
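
The ReconfigVM_Task and SearchDatastore_Task entries above follow oslo.vmware's invoke-then-poll pattern: a vCenter method that returns a Task object is invoked through the session, and wait_for_task polls that task until it reports "completed successfully", emitting the "progress is N%" lines along the way. The following is only a minimal sketch of that calling pattern, assuming a plain oslo.vmware session; the host, credentials, poll settings and UUID are illustrative placeholders, not values taken from this run:

    from oslo_vmware import api

    # Illustrative session; host, credentials and poll settings are placeholders.
    session = api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Look up a VM by instance UUID (the SearchIndex.FindAllByUuid call seen above).
    vm_refs = session.invoke_api(
        session.vim, 'FindAllByUuid',
        session.vim.service_content.searchIndex,
        uuid='00000000-0000-0000-0000-000000000000',
        vmSearch=True, instanceUuid=True)

    # Start an asynchronous vCenter task and block until it finishes;
    # the polling inside wait_for_task is what produces the
    # "Task: {...} progress is N%" entries in this log.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_refs[0])
    session.wait_for_task(task)
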
[ 1188.661552] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3e0fef95-e21d-461a-a5fe-a20fe58c433b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.683031] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1188.683031] env[69475]: value = "task-3509088" [ 1188.683031] env[69475]: _type = "Task" [ 1188.683031] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.689113] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf21d742-b147-4381-b571-4ef094d04d20 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.694763] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509088, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.695700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1188.695930] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1188.696159] env[69475]: INFO nova.compute.manager [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Attaching volume 55979385-56fa-4679-83f2-a4ecdaa6c8f3 to /dev/sdb [ 1188.713426] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1188.744270] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eefa436-cf4e-443a-943e-c1527797d111 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.752412] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f0479fe-8bdd-43cc-b2b6-5b7d866201e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.767979] env[69475]: 
DEBUG nova.virt.block_device [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating existing volume attachment record: 95f35386-75d8-478e-af32-1100f03a9d15 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1189.039504] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.181465] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.181838] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.182102] env[69475]: DEBUG nova.objects.instance [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'resources' on Instance uuid 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1189.195014] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509088, 'name': ReconfigVM_Task, 'duration_secs': 0.362097} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.196316] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1189.197450] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79d8cc66-5d39-4c4e-b6ed-8626b680c0c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.224330] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1189.226084] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1189.226322] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-91a9532c-547a-4e04-81e7-144f7bc6eaa0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.240158] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5755c027-ecc4-4284-b6fc-d24e689eac47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.248624] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1189.248624] env[69475]: value = "task-3509093" [ 1189.248624] env[69475]: _type = "Task" [ 1189.248624] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.250293] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1189.250293] env[69475]: value = "task-3509094" [ 1189.250293] env[69475]: _type = "Task" [ 1189.250293] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.266174] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509093, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.270580] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509094, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.514227] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.514511] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.514743] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.514980] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.515208] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.517789] env[69475]: INFO nova.compute.manager [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Terminating instance [ 1189.540941] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.766200] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509093, 'name': PowerOffVM_Task, 'duration_secs': 0.202827} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.769131] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1189.769331] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1189.772711] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509094, 'name': ReconfigVM_Task, 'duration_secs': 0.310991} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.776673] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f/460d4b93-b18a-4965-9e2b-8c6175ccc91f.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.778196] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de786a8-f24c-4def-a0e3-74f50edb6233 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.805469] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431dea47-59a9-472f-9eb0-0d4821cafa3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.840297] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35dbeda8-7938-4d12-a63e-ee79f7e5c31e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.869469] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942deede-15e9-4f61-ac16-dd0ad16630f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.875480] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] 
Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.875778] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02286322-dd18-49d7-9fbf-254c63bc004d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.884906] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1189.884906] env[69475]: value = "task-3509095" [ 1189.884906] env[69475]: _type = "Task" [ 1189.884906] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.894878] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509095, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.965438] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d07c6cb-53ca-4fb5-9057-7c2ee3160ef0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.974602] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17432eb9-b330-41bc-be6c-f5905106cc6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.007322] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c7b318-a04f-4fae-a11e-f1c69a407f10 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.016523] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af3a460-6135-40bb-af08-98f52f0d635b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.021058] env[69475]: DEBUG nova.compute.manager [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1190.021283] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1190.022039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cf0704-0e29-4e22-b2f9-7430570a583f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.035322] env[69475]: DEBUG nova.compute.provider_tree [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.042212] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1190.042817] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0eb72d98-b6c3-4834-acc5-54333a97b463 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.051845] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.116814] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1190.117146] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1190.117391] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore2] 123426f0-207f-4a57-8211-8fd4e8ea9daf {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1190.117698] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f4a1d7a-351b-4e94-9d14-b12b79056174 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.126861] env[69475]: DEBUG oslo_vmware.api [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1190.126861] env[69475]: value = "task-3509097" [ 1190.126861] env[69475]: _type = "Task" [ 1190.126861] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.135885] env[69475]: DEBUG oslo_vmware.api [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509097, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.276528] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1190.277832] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1190.278180] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible 
topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1190.278219] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1190.278709] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1190.286024] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e42b1d9-8807-4c18-b1f2-d9d12159c0c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.303289] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1190.303289] env[69475]: value = "task-3509098" [ 1190.303289] env[69475]: _type = "Task" [ 1190.303289] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.317672] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509098, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.395875] env[69475]: DEBUG oslo_vmware.api [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509095, 'name': PowerOnVM_Task, 'duration_secs': 0.463435} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.396170] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.546404] env[69475]: DEBUG nova.scheduler.client.report [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1190.549481] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.637229] env[69475]: DEBUG oslo_vmware.api [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509097, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137044} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.637482] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1190.637669] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1190.637849] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1190.638073] env[69475]: INFO nova.compute.manager [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Took 0.62 seconds to destroy the instance on the hypervisor. 
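
The nova.virt.hardware sequences above ("Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies ... Sorted desired topologies") enumerate the (sockets, cores, threads) factorizations of the flavor's vCPU count within the default 65536 limits; with vcpus=1 the only candidate is 1:1:1. The sketch below is a standalone illustration of that enumeration, not code taken from Nova's hardware.py:

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Yield (sockets, cores, threads) combinations whose product equals vcpus."""
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                yield sockets, cores, threads

    # m1.nano has vcpus=1, so the only factorization is (1, 1, 1), matching
    # "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
    print(list(possible_topologies(1)))        # [(1, 1, 1)]
    print(len(list(possible_topologies(4))))   # 6 factorizations for a 4-vCPU flavor
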
[ 1190.638325] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1190.638514] env[69475]: DEBUG nova.compute.manager [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1190.638609] env[69475]: DEBUG nova.network.neutron [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1190.814576] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509098, 'name': ReconfigVM_Task, 'duration_secs': 0.433966} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.814900] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1190.907770] env[69475]: DEBUG nova.compute.manager [req-d3c42caf-e6aa-4f45-98ea-fea4e8e95dea req-2fcc46d7-4ec0-4c0e-b5d9-562b8a2864ac service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Received event network-vif-deleted-fa01b86b-5802-422e-b736-87395230d146 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1190.907970] env[69475]: INFO nova.compute.manager [req-d3c42caf-e6aa-4f45-98ea-fea4e8e95dea req-2fcc46d7-4ec0-4c0e-b5d9-562b8a2864ac service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Neutron deleted interface fa01b86b-5802-422e-b736-87395230d146; detaching it from the instance and deleting it from the info cache [ 1190.908173] env[69475]: DEBUG nova.network.neutron [req-d3c42caf-e6aa-4f45-98ea-fea4e8e95dea req-2fcc46d7-4ec0-4c0e-b5d9-562b8a2864ac service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.047301] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.051364] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.869s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.071829] env[69475]: INFO nova.scheduler.client.report [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted allocations for instance 6960992f-a4dd-4a5d-abb8-ff7ae8a414b8 [ 1191.321718] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1191.322032] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1191.322255] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1191.322457] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1191.322608] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1191.322760] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1191.322967] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1191.323148] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1191.323478] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1191.323478] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1191.323645] env[69475]: DEBUG nova.virt.hardware [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1191.329877] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1191.330409] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7bc11f02-946c-47ae-a40c-14a31ba1d88c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.350957] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1191.350957] env[69475]: value = "task-3509101" [ 1191.350957] env[69475]: _type = "Task" [ 1191.350957] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.359811] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509101, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.388911] env[69475]: DEBUG nova.network.neutron [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1191.409923] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c83df0e7-462f-4f88-8b45-4a5146967d0d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.421356] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ae6ff88-743b-4f4c-b405-16d3c3614021 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.451458] env[69475]: INFO nova.compute.manager [None req-a38e8445-7240-4738-8ed0-ec4422f76633 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance to original state: 'active' [ 1191.467418] env[69475]: DEBUG nova.compute.manager [req-d3c42caf-e6aa-4f45-98ea-fea4e8e95dea req-2fcc46d7-4ec0-4c0e-b5d9-562b8a2864ac service nova] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Detach interface failed, port_id=fa01b86b-5802-422e-b736-87395230d146, reason: Instance 123426f0-207f-4a57-8211-8fd4e8ea9daf could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1191.547180] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task} progress is 18%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.579874] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a51290d3-7153-4574-9222-f56cd274fb99 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "6960992f-a4dd-4a5d-abb8-ff7ae8a414b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.131s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.861646] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509101, 'name': ReconfigVM_Task, 'duration_secs': 0.170837} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.861938] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1191.862709] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e25614-2251-4ad5-8f71-abfadbd3b342 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.886390] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1191.886631] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a3f572a-327c-4dc1-a2b1-83ea5f4f18dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.899349] env[69475]: INFO nova.compute.manager [-] [instance: 123426f0-207f-4a57-8211-8fd4e8ea9daf] Took 1.26 seconds to deallocate network for instance. [ 1191.907058] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1191.907058] env[69475]: value = "task-3509102" [ 1191.907058] env[69475]: _type = "Task" [ 1191.907058] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.915681] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509102, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.048303] env[69475]: DEBUG oslo_vmware.api [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509081, 'name': ReconfigVM_Task, 'duration_secs': 5.758514} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.048550] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.048801] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Reconfigured VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1192.407178] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1192.407463] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1192.407689] env[69475]: DEBUG nova.objects.instance [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid 123426f0-207f-4a57-8211-8fd4e8ea9daf {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1192.422233] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509102, 'name': ReconfigVM_Task, 'duration_secs': 0.260051} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.422233] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1192.422532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1192.929972] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6760cd7e-1865-490e-86aa-e33f82ae067c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.952608] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c67fa4a-1b70-49d9-8a95-71eae2f3f984 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.975281] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1193.153024] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ad044e-bd82-4874-b669-a16b0b9da22f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.162254] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45953d9e-751b-4b97-8cae-9f092c78d766 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.195193] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78a4f40-6d05-4160-9a6e-c91774fef373 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.203923] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0d917e-c978-4ee6-94b2-e709231b73c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.217865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1193.218130] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.218336] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.218518] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.218720] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.220496] env[69475]: DEBUG nova.compute.provider_tree [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1193.221907] env[69475]: INFO nova.compute.manager [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Terminating instance [ 1193.460070] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.460535] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.540715] env[69475]: DEBUG nova.network.neutron [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 
tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Port 5b51cc5d-6e38-423f-8f69-13541ea8a317 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1193.631460] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.631634] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.631761] env[69475]: DEBUG nova.network.neutron [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1193.725035] env[69475]: DEBUG nova.scheduler.client.report [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1193.729105] env[69475]: DEBUG nova.compute.manager [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1193.729363] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1193.729845] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25b7494b-d542-4ca4-92c1-3b91887e1458 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.739730] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1193.739730] env[69475]: value = "task-3509104" [ 1193.739730] env[69475]: _type = "Task" [ 1193.739730] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.754287] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509104, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.817500] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1193.817500] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701151', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'name': 'volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd63ddc35-06b3-43a2-bdd5-a91cf4047a4b', 'attached_at': '', 'detached_at': '', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'serial': '55979385-56fa-4679-83f2-a4ecdaa6c8f3'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1193.818497] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d36afb-9a22-4e01-a1c0-5ea3847d59f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.842012] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91f9603b-40f2-4011-ac6f-a8335908b056 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.871037] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3/volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1193.871427] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0617d2ba-3f41-4c5d-824e-666b79855820 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.891455] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1193.891455] env[69475]: value = "task-3509105" [ 1193.891455] env[69475]: _type = "Task" [ 1193.891455] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.901271] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509105, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.965472] env[69475]: INFO nova.compute.manager [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Detaching volume be3145de-1a5b-4dc5-bbd7-5173190bff83 [ 1194.011220] env[69475]: INFO nova.virt.block_device [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Attempting to driver detach volume be3145de-1a5b-4dc5-bbd7-5173190bff83 from mountpoint /dev/sdb [ 1194.011535] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Volume detach. Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1194.011776] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701134', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'name': 'volume-be3145de-1a5b-4dc5-bbd7-5173190bff83', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92020fc6-aff6-437f-9e26-a5b61ea7e76f', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'serial': 'be3145de-1a5b-4dc5-bbd7-5173190bff83'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1194.012797] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814c30b2-6f14-4138-b21c-6d93d12adb96 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.036718] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f95e79a-5771-40af-add6-e83cf86be661 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.049224] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a24fd0c-1399-4b56-94e1-4d4baf5615bc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.071813] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a881f4f8-7020-42fa-a748-80c8b1ca193c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.089407] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] The volume has not been displaced from its original location: [datastore2] volume-be3145de-1a5b-4dc5-bbd7-5173190bff83/volume-be3145de-1a5b-4dc5-bbd7-5173190bff83.vmdk. 
No consolidation needed. {{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1194.094931] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1194.095307] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5183c6ef-2019-4702-9cb1-46fd0d605ab2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.115642] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1194.115642] env[69475]: value = "task-3509106" [ 1194.115642] env[69475]: _type = "Task" [ 1194.115642] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.124552] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509106, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.231506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.258983] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509104, 'name': PowerOffVM_Task, 'duration_secs': 0.344174} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.264155] env[69475]: INFO nova.scheduler.client.report [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance 123426f0-207f-4a57-8211-8fd4e8ea9daf [ 1194.269494] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.269728] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1194.270325] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701141', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'name': 'volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '460d4b93-b18a-4965-9e2b-8c6175ccc91f', 'attached_at': '2025-04-22T09:45:22.000000', 'detached_at': '', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'serial': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1194.276020] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ec8572-d1f2-4c01-aa6c-1226e2222382 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.308807] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dd5edf-f153-46de-b4b9-27c65a140029 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.315941] env[69475]: DEBUG nova.compute.manager [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1194.316269] env[69475]: DEBUG nova.compute.manager [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing instance network info cache due to event network-changed-f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1194.316332] env[69475]: DEBUG oslo_concurrency.lockutils [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] Acquiring lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.340336] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f029675-3385-4f42-8ba8-b42b60394053 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.368353] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4276541-5437-44bc-9019-4f387181a6d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.387667] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] The volume has not been displaced from its original location: [datastore2] volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69/volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69.vmdk. No consolidation needed. {{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1194.394254] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1194.394254] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7b912c3-cffc-4899-9f0e-0646bcb99da9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.409178] env[69475]: INFO nova.network.neutron [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Port 1ef95bec-a8fb-4ee7-b99a-299bf62af225 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1194.409178] env[69475]: DEBUG nova.network.neutron [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.418992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.419461] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.427395] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509105, 'name': ReconfigVM_Task, 'duration_secs': 0.378376} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.428974] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfigured VM instance instance-00000074 to attach disk [datastore1] volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3/volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1194.437023] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1194.437023] env[69475]: value = "task-3509107" [ 1194.437023] env[69475]: _type = "Task" [ 1194.437023] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.437023] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-796fcd24-4a5e-4da5-bbad-fdb032cd63da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.460326] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509107, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.461891] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1194.461891] env[69475]: value = "task-3509108" [ 1194.461891] env[69475]: _type = "Task" [ 1194.461891] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.472176] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509108, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.564750] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.565060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.565312] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.630458] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509106, 'name': ReconfigVM_Task, 'duration_secs': 0.275519} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.630960] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1194.643404] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51cb1d4f-2fa2-4a28-8b2c-8b71c83db92a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.681304] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1194.681304] env[69475]: value = "task-3509109" [ 1194.681304] env[69475]: _type = "Task" [ 1194.681304] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.701308] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509109, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.783265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-a5e4b1d3-08f3-4b09-8010-35f0887a48ab tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "123426f0-207f-4a57-8211-8fd4e8ea9daf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.269s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1194.916149] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.918732] env[69475]: DEBUG oslo_concurrency.lockutils [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] Acquired lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.918930] env[69475]: DEBUG nova.network.neutron [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Refreshing network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1194.929895] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1194.957888] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509107, 'name': ReconfigVM_Task, 'duration_secs': 0.230164} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.958388] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1194.963190] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-244259bf-8bc0-4923-816d-6071e073e5b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.983412] env[69475]: DEBUG oslo_vmware.api [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509108, 'name': ReconfigVM_Task, 'duration_secs': 0.255559} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.984721] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701151', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'name': 'volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd63ddc35-06b3-43a2-bdd5-a91cf4047a4b', 'attached_at': '', 'detached_at': '', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'serial': '55979385-56fa-4679-83f2-a4ecdaa6c8f3'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1194.986593] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1194.986593] env[69475]: value = "task-3509110" [ 1194.986593] env[69475]: _type = "Task" [ 1194.986593] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.995559] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.028666] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.028921] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.029359] env[69475]: DEBUG nova.objects.instance [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'flavor' on Instance uuid eadfee29-c7fc-4d33-8869-7ea8e753554c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.193429] env[69475]: DEBUG oslo_vmware.api [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509109, 'name': ReconfigVM_Task, 'duration_secs': 0.23845} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.193429] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701134', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'name': 'volume-be3145de-1a5b-4dc5-bbd7-5173190bff83', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '92020fc6-aff6-437f-9e26-a5b61ea7e76f', 'attached_at': '', 'detached_at': '', 'volume_id': 'be3145de-1a5b-4dc5-bbd7-5173190bff83', 'serial': 'be3145de-1a5b-4dc5-bbd7-5173190bff83'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1195.421264] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce1a4d7a-6709-4c88-b980-1d1f3d87d305 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-579b4d3e-bd76-4f5d-b972-7b289bca04a0-1ef95bec-a8fb-4ee7-b99a-299bf62af225" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.980s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.461967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.461967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.466759] env[69475]: INFO nova.compute.claims [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1195.504013] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509110, 'name': ReconfigVM_Task, 'duration_secs': 0.143121} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.505079] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701141', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'name': 'volume-a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '460d4b93-b18a-4965-9e2b-8c6175ccc91f', 'attached_at': '2025-04-22T09:45:22.000000', 'detached_at': '', 'volume_id': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69', 'serial': 'a6c984d3-67d7-42ec-8b22-82a4405a0b69'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1195.505745] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1195.507296] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95425b7e-7209-4733-b797-59fec7a5f14e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.513088] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.513343] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.513558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1195.513983] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1195.513983] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 
tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1195.519709] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1195.520248] env[69475]: INFO nova.compute.manager [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Terminating instance [ 1195.521890] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2e2a1b2c-e50f-4e5b-b27d-b6d29d6ccd33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.594163] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1195.594404] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1195.594591] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleting the datastore file [datastore1] 460d4b93-b18a-4965-9e2b-8c6175ccc91f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1195.595339] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9a8600e0-4b49-47fe-a67b-ad1c66d3d361 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.605521] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1195.605521] env[69475]: value = "task-3509113" [ 1195.605521] env[69475]: _type = "Task" [ 1195.605521] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.615967] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.631283] env[69475]: DEBUG nova.objects.instance [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'pci_requests' on Instance uuid eadfee29-c7fc-4d33-8869-7ea8e753554c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1195.670166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.670407] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.670557] env[69475]: DEBUG nova.network.neutron [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1195.706302] env[69475]: DEBUG nova.network.neutron [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updated VIF entry in instance network info cache for port f953a932-b0a0-4620-ae5b-9a9cda24d9a4. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1195.706702] env[69475]: DEBUG nova.network.neutron [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [{"id": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "address": "fa:16:3e:74:59:f0", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf953a932-b0", "ovs_interfaceid": "f953a932-b0a0-4620-ae5b-9a9cda24d9a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1195.733892] env[69475]: DEBUG nova.objects.instance [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'flavor' on Instance uuid 92020fc6-aff6-437f-9e26-a5b61ea7e76f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.028604] env[69475]: DEBUG nova.objects.instance [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1196.031100] env[69475]: DEBUG nova.compute.manager [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1196.031100] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1196.031602] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9da360c-424b-444e-a209-d1eec1236ccb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.040081] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1196.040316] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3645b544-2637-44ec-b387-0063de070a67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.048382] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1196.048382] env[69475]: value = "task-3509114" [ 1196.048382] env[69475]: _type = "Task" [ 1196.048382] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.057682] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509114, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.116651] env[69475]: DEBUG oslo_vmware.api [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192442} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.116893] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1196.117085] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1196.117269] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1196.117443] env[69475]: INFO nova.compute.manager [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Took 2.39 seconds to destroy the instance on the hypervisor. [ 1196.117689] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1196.117868] env[69475]: DEBUG nova.compute.manager [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1196.117974] env[69475]: DEBUG nova.network.neutron [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1196.134394] env[69475]: DEBUG nova.objects.base [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1196.134606] env[69475]: DEBUG nova.network.neutron [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1196.209713] env[69475]: DEBUG oslo_concurrency.lockutils [req-a18db44a-7f37-43d0-b4b2-81d64e05cb02 req-bad91903-6d0d-4d2f-893b-533ab6a32904 service nova] Releasing lock "refresh_cache-579b4d3e-bd76-4f5d-b972-7b289bca04a0" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1196.291847] env[69475]: DEBUG nova.policy [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'cc345af1a2c34fba98fa191b637a284a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b2ba1a4125454d39bc92b6123447d98a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1196.534618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-2ac7b3f3-3047-406b-b3e6-f34541a8767a tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.838s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.563248] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509114, 'name': PowerOffVM_Task, 'duration_secs': 0.222814} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.563425] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1196.563688] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1196.564577] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c585b0bf-0cf3-4fd7-9552-91c273995ba1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.640296] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1196.640598] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1196.640796] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleting the datastore file [datastore1] 4066a18f-acc5-49b5-941c-0711f29bdcd2 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1196.643435] env[69475]: DEBUG nova.network.neutron [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.644685] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a46ba3d9-cfe7-4d8a-8472-022d080dbde3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.657804] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for the task: (returnval){ [ 1196.657804] env[69475]: value = "task-3509116" [ 1196.657804] env[69475]: _type = "Task" [ 1196.657804] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.666536] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.693558] env[69475]: DEBUG nova.compute.manager [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.693558] env[69475]: DEBUG nova.compute.manager [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing instance network info cache due to event network-changed-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1196.693558] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1196.693558] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1196.693558] env[69475]: DEBUG nova.network.neutron [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.736521] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a2765c-f7b6-4248-9022-ab4c9eb8478c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.741924] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1557d19a-430a-40c5-b528-3d534ad2a87e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.281s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.753352] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f22825f-77d4-4d88-aed9-f96f13c75c75 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.791674] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ebe2847-c830-48d7-bb8d-12293db10af0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.801263] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5e8824-62e0-47f4-b522-0180c6d2db6d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.817059] env[69475]: DEBUG nova.compute.provider_tree [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1197.121421] env[69475]: DEBUG nova.compute.manager [req-12ba55ce-ac57-4727-a5af-0a89bab7a75f req-54b107cc-c26a-4d8c-8f08-ab2f773243a8 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Received event network-vif-deleted-81121438-ec92-4519-97f1-e2a871109623 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1197.121690] env[69475]: INFO nova.compute.manager [req-12ba55ce-ac57-4727-a5af-0a89bab7a75f req-54b107cc-c26a-4d8c-8f08-ab2f773243a8 service nova] 
[instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Neutron deleted interface 81121438-ec92-4519-97f1-e2a871109623; detaching it from the instance and deleting it from the info cache [ 1197.121799] env[69475]: DEBUG nova.network.neutron [req-12ba55ce-ac57-4727-a5af-0a89bab7a75f req-54b107cc-c26a-4d8c-8f08-ab2f773243a8 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.151443] env[69475]: DEBUG oslo_concurrency.lockutils [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.168191] env[69475]: DEBUG oslo_vmware.api [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Task: {'id': task-3509116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136998} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.168830] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1197.169061] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1197.169255] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1197.169429] env[69475]: INFO nova.compute.manager [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1197.169974] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1197.170195] env[69475]: DEBUG nova.compute.manager [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1197.170292] env[69475]: DEBUG nova.network.neutron [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1197.258708] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.258988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.259212] env[69475]: DEBUG nova.compute.manager [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1197.260104] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f395ee-ad72-4503-a600-6791b052d48a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.267541] env[69475]: DEBUG nova.compute.manager [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1197.268133] env[69475]: DEBUG nova.objects.instance [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.319957] env[69475]: DEBUG nova.scheduler.client.report [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1197.419519] env[69475]: DEBUG nova.network.neutron [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updated VIF entry in instance network info cache for port 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1197.420078] env[69475]: DEBUG nova.network.neutron [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.481568] env[69475]: DEBUG nova.network.neutron [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1197.627333] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6dbe9ff5-a861-4e82-b1e7-bb2cd4d2a6a0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.640255] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824c19a1-733d-4a85-bc0d-b0f44d9e1c25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.699201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.699201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock 
"92020fc6-aff6-437f-9e26-a5b61ea7e76f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.699201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.699201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.699201] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.700164] env[69475]: DEBUG nova.compute.manager [req-12ba55ce-ac57-4727-a5af-0a89bab7a75f req-54b107cc-c26a-4d8c-8f08-ab2f773243a8 service nova] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Detach interface failed, port_id=81121438-ec92-4519-97f1-e2a871109623, reason: Instance 460d4b93-b18a-4965-9e2b-8c6175ccc91f could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1197.701483] env[69475]: INFO nova.compute.manager [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Terminating instance [ 1197.703735] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96e5c0c-d863-4921-989b-0403e51d40dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.727057] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0c4f3e-3b6f-43ef-a30d-8f70ab9b1a25 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.735396] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1197.827881] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1197.828475] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1197.923668] env[69475]: DEBUG oslo_concurrency.lockutils [req-2fbf8f9b-0e0b-4d5e-adba-90f21b1f1dec req-3b152177-0d73-4c6b-a409-176132c72abe service nova] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.984987] env[69475]: INFO nova.compute.manager [-] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Took 1.87 seconds to deallocate network for instance. [ 1198.148928] env[69475]: DEBUG nova.network.neutron [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Successfully updated port: 1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1198.209779] env[69475]: DEBUG nova.compute.manager [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1198.210094] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1198.210425] env[69475]: DEBUG nova.network.neutron [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1198.212153] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-134d8806-f65e-4e8c-aa82-f1a0d730bdd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.221457] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.221697] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b83defd4-49e2-468c-b821-5f6e6a8cfbb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.229889] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1198.229889] env[69475]: value = "task-3509118" [ 1198.229889] env[69475]: _type = "Task" [ 1198.229889] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.238379] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.242423] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.242943] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b93f2d2b-b7f5-4953-aba2-97b84484dc70 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.250026] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1198.250026] env[69475]: value = "task-3509119" [ 1198.250026] env[69475]: _type = "Task" [ 1198.250026] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.258133] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509119, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.277371] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1198.277775] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-01fc7c12-113f-4aab-a08c-32fd2eae3b21 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.284955] env[69475]: DEBUG oslo_vmware.api [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1198.284955] env[69475]: value = "task-3509120" [ 1198.284955] env[69475]: _type = "Task" [ 1198.284955] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.295473] env[69475]: DEBUG oslo_vmware.api [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.333756] env[69475]: DEBUG nova.compute.utils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1198.335573] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1198.335573] env[69475]: DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1198.377582] env[69475]: DEBUG nova.policy [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '11c9c75b1984423f860daec9827e7ce6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '67d27343d8c04fc9a2bed7a764f6cf82', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1198.531333] env[69475]: INFO nova.compute.manager [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 460d4b93-b18a-4965-9e2b-8c6175ccc91f] Took 0.55 seconds to detach 1 volumes for instance. [ 1198.651316] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1198.651501] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1198.652184] env[69475]: DEBUG nova.network.neutron [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1198.717144] env[69475]: INFO nova.compute.manager [-] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Took 1.55 seconds to deallocate network for instance. [ 1198.720212] env[69475]: DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Successfully created port: 24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1198.741571] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509118, 'name': PowerOffVM_Task, 'duration_secs': 0.229254} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.741830] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.742028] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1198.742289] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13655ad1-e553-4eb4-92be-22b1421fdc17 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.760870] env[69475]: DEBUG oslo_vmware.api [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509119, 'name': PowerOnVM_Task, 'duration_secs': 0.40203} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.761182] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1198.761377] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-067ba6ad-0ce8-465d-878e-71e5f96851a1 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance '0c1ee654-0d2e-40a8-b9a9-291c6a9ab954' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1198.796961] env[69475]: DEBUG oslo_vmware.api [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509120, 'name': PowerOffVM_Task, 'duration_secs': 0.207943} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.797330] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1198.797597] env[69475]: DEBUG nova.compute.manager [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1198.798458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8ec0563-c1f6-4314-8cda-4f301077058b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.818027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1198.818027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1198.818027] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleting the datastore file [datastore1] 92020fc6-aff6-437f-9e26-a5b61ea7e76f {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.818027] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4adf685c-d2c9-4224-8fbf-b90dad479aba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.825655] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1198.825655] env[69475]: value = "task-3509122" [ 1198.825655] env[69475]: _type = "Task" [ 1198.825655] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.836468] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509122, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.838666] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1199.037107] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.037418] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.037615] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.060855] env[69475]: INFO nova.scheduler.client.report [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted allocations for instance 460d4b93-b18a-4965-9e2b-8c6175ccc91f [ 1199.160788] env[69475]: DEBUG nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: 4066a18f-acc5-49b5-941c-0711f29bdcd2] Received event network-vif-deleted-325d7757-39de-4455-954e-feb4a1be8355 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.161172] env[69475]: DEBUG nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.161428] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.161428] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1199.161546] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.161708] env[69475]: DEBUG nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] No waiting events found dispatching network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1199.161962] env[69475]: WARNING nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received unexpected event network-vif-plugged-1ef95bec-a8fb-4ee7-b99a-299bf62af225 for instance with vm_state active and task_state None. [ 1199.162223] env[69475]: DEBUG nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-changed-1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.162318] env[69475]: DEBUG nova.compute.manager [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing instance network info cache due to event network-changed-1ef95bec-a8fb-4ee7-b99a-299bf62af225. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1199.162486] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.193903] env[69475]: WARNING nova.network.neutron [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] 801aee55-f715-4cdf-b89c-184ca3f24866 already exists in list: networks containing: ['801aee55-f715-4cdf-b89c-184ca3f24866']. 
ignoring it [ 1199.226593] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1199.226872] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1199.227114] env[69475]: DEBUG nova.objects.instance [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lazy-loading 'resources' on Instance uuid 4066a18f-acc5-49b5-941c-0711f29bdcd2 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.311967] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1907be6d-d88e-4ba7-a653-6d4f88e1c2cd tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.341584] env[69475]: DEBUG oslo_vmware.api [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186029} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.341740] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1199.342456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1199.342456] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1199.342456] env[69475]: INFO nova.compute.manager [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 1199.342694] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1199.346600] env[69475]: DEBUG nova.compute.manager [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1199.346718] env[69475]: DEBUG nova.network.neutron [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1199.569892] env[69475]: DEBUG oslo_concurrency.lockutils [None req-983b48a0-15da-4d7e-9a38-468f45163bbc tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "460d4b93-b18a-4965-9e2b-8c6175ccc91f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.352s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1199.609966] env[69475]: DEBUG nova.network.neutron [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "address": "fa:16:3e:26:86:b5", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef95bec-a8", "ovs_interfaceid": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.851199] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1199.877086] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1199.877366] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1199.877519] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1199.877706] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1199.877840] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1199.877981] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 
tempest-AttachVolumeNegativeTest-241775551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1199.879797] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1199.879797] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1199.879797] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1199.879797] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1199.879797] env[69475]: DEBUG nova.virt.hardware [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1199.880119] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb08679a-4741-46c0-8239-9cac7610791f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.890974] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4af6a0d9-7568-4fbe-9620-26ba8e5860ef {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.936123] env[69475]: DEBUG nova.compute.manager [req-195dd554-6f07-4040-911b-52c26c96869b req-13972f47-1182-494f-937f-6382a3ab925d service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Received event network-vif-deleted-b2b04f22-0a1e-4c90-b84f-5d119fc7e528 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1199.936313] env[69475]: INFO nova.compute.manager [req-195dd554-6f07-4040-911b-52c26c96869b req-13972f47-1182-494f-937f-6382a3ab925d service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Neutron deleted interface b2b04f22-0a1e-4c90-b84f-5d119fc7e528; detaching it from the instance and deleting it from the info cache [ 1199.936480] env[69475]: DEBUG nova.network.neutron [req-195dd554-6f07-4040-911b-52c26c96869b req-13972f47-1182-494f-937f-6382a3ab925d service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1199.946879] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78adc1fb-ee96-497b-b57e-7e0034b19206 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.956470] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f450c495-e038-4717-bf26-127cd0e1c171 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.989960] env[69475]: DEBUG nova.objects.instance [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1199.991992] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a286ad1c-6cda-420d-9353-641bd47a5a81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.000647] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0815b0e7-7086-4e25-aeb5-d9f8cfd44740 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.019708] env[69475]: DEBUG nova.compute.provider_tree [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.113405] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.114119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.114282] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.114559] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.114741] env[69475]: DEBUG nova.network.neutron [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Refreshing network info cache for port 
1ef95bec-a8fb-4ee7-b99a-299bf62af225 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.116859] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164bd853-07e3-4739-b9d4-05cdb7d8e446 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.135293] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1200.135569] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1200.135671] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1200.135857] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1200.136013] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1200.136166] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1200.136368] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1200.136528] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1200.136694] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1200.136881] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1200.137073] env[69475]: DEBUG nova.virt.hardware [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1200.143180] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfiguring VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1200.144104] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-714df26f-2069-48fb-a539-4e03f13fe8b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.164882] env[69475]: DEBUG oslo_vmware.api [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1200.164882] env[69475]: value = "task-3509124" [ 1200.164882] env[69475]: _type = "Task" [ 1200.164882] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.173566] env[69475]: DEBUG oslo_vmware.api [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509124, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.328310] env[69475]: DEBUG nova.network.neutron [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.380900] env[69475]: DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Successfully updated port: 24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1200.439715] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d544f7c0-ebdf-41ad-b67f-05da69c81746 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.451994] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b9671c-4847-4965-ba36-8634f67ae2b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.490101] env[69475]: DEBUG nova.compute.manager [req-195dd554-6f07-4040-911b-52c26c96869b req-13972f47-1182-494f-937f-6382a3ab925d service nova] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Detach interface failed, port_id=b2b04f22-0a1e-4c90-b84f-5d119fc7e528, reason: Instance 92020fc6-aff6-437f-9e26-a5b61ea7e76f could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1200.499861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.499861] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.499861] env[69475]: DEBUG nova.network.neutron [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.499861] env[69475]: DEBUG nova.objects.instance [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'info_cache' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1200.522783] env[69475]: DEBUG nova.scheduler.client.report [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1200.680585] env[69475]: DEBUG oslo_vmware.api [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509124, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.832074] env[69475]: INFO nova.compute.manager [-] [instance: 92020fc6-aff6-437f-9e26-a5b61ea7e76f] Took 1.49 seconds to deallocate network for instance. [ 1200.885894] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.886079] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.886272] env[69475]: DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1200.891548] env[69475]: DEBUG nova.network.neutron [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updated VIF entry in instance network info cache for port 1ef95bec-a8fb-4ee7-b99a-299bf62af225. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1200.892016] env[69475]: DEBUG nova.network.neutron [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "address": "fa:16:3e:26:86:b5", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ef95bec-a8", "ovs_interfaceid": "1ef95bec-a8fb-4ee7-b99a-299bf62af225", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.001942] env[69475]: DEBUG nova.objects.base [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1201.028027] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.801s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.057997] env[69475]: INFO nova.scheduler.client.report [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Deleted allocations for instance 4066a18f-acc5-49b5-941c-0711f29bdcd2 [ 1201.059248] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.059475] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.176851] env[69475]: DEBUG oslo_vmware.api [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509124, 'name': ReconfigVM_Task, 'duration_secs': 0.581183} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.185021] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.185270] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfigured VM to attach interface {{(pid=69475) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1201.192893] env[69475]: DEBUG nova.compute.manager [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Received event network-vif-plugged-24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.193217] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.193563] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.193861] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.194180] env[69475]: DEBUG nova.compute.manager [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] No waiting events found dispatching network-vif-plugged-24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1201.194468] env[69475]: WARNING nova.compute.manager [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Received unexpected event network-vif-plugged-24283fcb-3bd1-46b1-a7e5-bf792688cc87 for instance with vm_state building and task_state spawning. [ 1201.194817] env[69475]: DEBUG nova.compute.manager [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Received event network-changed-24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1201.195025] env[69475]: DEBUG nova.compute.manager [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Refreshing instance network info cache due to event network-changed-24283fcb-3bd1-46b1-a7e5-bf792688cc87. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1201.195293] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Acquiring lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.338360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1201.338642] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1201.338864] env[69475]: DEBUG nova.objects.instance [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'resources' on Instance uuid 92020fc6-aff6-437f-9e26-a5b61ea7e76f {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1201.357673] env[69475]: DEBUG nova.network.neutron [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Port 5b51cc5d-6e38-423f-8f69-13541ea8a317 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1201.357993] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.358192] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.358364] env[69475]: DEBUG nova.network.neutron [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1201.395088] env[69475]: DEBUG oslo_concurrency.lockutils [req-1be31efe-341a-4281-b299-1e451c07fb7c req-c6d83125-2039-4209-8118-69ef6dd5fe86 service nova] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.417179] env[69475]: 
DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1201.538248] env[69475]: DEBUG nova.network.neutron [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updating instance_info_cache with network_info: [{"id": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "address": "fa:16:3e:54:61:67", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24283fcb-3b", "ovs_interfaceid": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.563370] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1201.569401] env[69475]: DEBUG oslo_concurrency.lockutils [None req-6ee7035f-25ae-4155-8b90-02097872e8a2 tempest-ServersTestJSON-711529090 tempest-ServersTestJSON-711529090-project-member] Lock "4066a18f-acc5-49b5-941c-0711f29bdcd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.056s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.697731] env[69475]: DEBUG oslo_concurrency.lockutils [None req-5681dfa3-8e5f-476d-b93d-10360b3d56ff tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.668s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.719986] env[69475]: DEBUG nova.network.neutron [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.023320] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18dbe450-8fb4-4bf2-b729-f7f7f968947a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.034202] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0896f0e5-cbc0-451c-8e22-00c28b5c8834 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.040613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.040951] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Instance network_info: |[{"id": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "address": "fa:16:3e:54:61:67", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24283fcb-3b", "ovs_interfaceid": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1202.064868] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Acquired lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.065948] env[69475]: DEBUG nova.network.neutron [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Refreshing network info cache for port 24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1202.066452] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:61:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '94926d5b-bfab-4c04-85b5-0fe89934c8ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '24283fcb-3bd1-46b1-a7e5-bf792688cc87', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1202.073858] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1202.081174] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1202.081916] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a855385-e1bd-4314-a50b-7bcfb2d01782 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.086352] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abdbf6a1-785a-4ff8-bbc7-0ab2efcacab7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.107444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e61ac5f-f993-4d6e-9d2b-98a87b2508b4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.113369] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1202.114270] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1202.114270] env[69475]: value = "task-3509125" [ 1202.114270] env[69475]: _type = "Task" [ 1202.114270] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.125298] env[69475]: DEBUG nova.compute.provider_tree [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1202.132360] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509125, 'name': CreateVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.133340] env[69475]: DEBUG nova.network.neutron [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.224631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.326484] env[69475]: DEBUG nova.network.neutron [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updated VIF entry in instance network info cache for port 24283fcb-3bd1-46b1-a7e5-bf792688cc87. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1202.326885] env[69475]: DEBUG nova.network.neutron [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updating instance_info_cache with network_info: [{"id": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "address": "fa:16:3e:54:61:67", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24283fcb-3b", "ovs_interfaceid": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.629467] env[69475]: DEBUG nova.scheduler.client.report [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1202.632503] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509125, 'name': CreateVM_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.635448] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.829610] env[69475]: DEBUG oslo_concurrency.lockutils [req-b36c1715-a42f-44e8-8b12-0c6d1c642b66 req-62e8f66d-8b18-4bb9-82f0-faa1c7c1f58c service nova] Releasing lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.128347] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509125, 'name': CreateVM_Task, 'duration_secs': 0.698617} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.129096] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1203.129857] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.131052] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.131052] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1203.131052] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1233ac35-2fe2-4df8-a381-984e64e21ea3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.133829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.795s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.136542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.023s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.139136] env[69475]: INFO nova.compute.claims [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1203.143719] env[69475]: DEBUG nova.compute.manager [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=69475) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1203.143719] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.143719] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1203.143719] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30791-5b90-8e9d-13c0-7dc195692c2c" [ 1203.143719] env[69475]: _type = "Task" [ 1203.143719] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.153904] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52f30791-5b90-8e9d-13c0-7dc195692c2c, 'name': SearchDatastore_Task, 'duration_secs': 0.01194} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.154423] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.154741] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1203.154908] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.155452] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.155452] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1203.155630] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-eabe88c2-94db-43e6-80fc-bf083e4d1cbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.165928] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1203.167192] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1203.167192] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9873dc3-2afa-4582-acc8-b8605b9b9f67 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.170679] env[69475]: INFO nova.scheduler.client.report [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted allocations for instance 92020fc6-aff6-437f-9e26-a5b61ea7e76f [ 1203.174812] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1203.174812] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5294c29b-f89c-993c-2d5c-43fb50ac2db8" [ 1203.174812] env[69475]: _type = "Task" [ 1203.174812] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.190298] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5294c29b-f89c-993c-2d5c-43fb50ac2db8, 'name': SearchDatastore_Task, 'duration_secs': 0.010467} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.191636] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-478dc96b-932c-4b2d-aaaa-b46b116c4443 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.197133] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1203.197133] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522712e9-ebdb-d250-8402-7420021b2b95" [ 1203.197133] env[69475]: _type = "Task" [ 1203.197133] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.207748] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522712e9-ebdb-d250-8402-7420021b2b95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.231097] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1203.231403] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-487a06a1-2ebf-4fa1-9f66-227fefb5d88d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.240913] env[69475]: DEBUG oslo_vmware.api [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1203.240913] env[69475]: value = "task-3509126" [ 1203.240913] env[69475]: _type = "Task" [ 1203.240913] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.249781] env[69475]: DEBUG oslo_vmware.api [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509126, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.276881] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.277159] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.685334] env[69475]: DEBUG oslo_concurrency.lockutils [None req-949c2253-4a13-4541-89cc-fda064ae80ce tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "92020fc6-aff6-437f-9e26-a5b61ea7e76f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.987s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1203.708056] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1203.708375] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1203.717994] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522712e9-ebdb-d250-8402-7420021b2b95, 'name': SearchDatastore_Task, 'duration_secs': 0.010389} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.718651] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1203.719101] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] da3eff39-b80b-4574-9b07-df6f679a9f38/da3eff39-b80b-4574-9b07-df6f679a9f38.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1203.719546] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d814fbe9-1632-456f-8fce-cf9ace0541a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.730732] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1203.730732] env[69475]: value = "task-3509127" [ 1203.730732] env[69475]: _type = "Task" [ 1203.730732] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.740848] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509127, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.757019] env[69475]: DEBUG oslo_vmware.api [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509126, 'name': PowerOnVM_Task, 'duration_secs': 0.458838} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.757787] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1203.757787] env[69475]: DEBUG nova.compute.manager [None req-9dbd1e1d-8877-42ec-9d02-0dd4f9434b09 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1203.758805] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d149a7fe-4c0a-464e-90df-c8fe28c40ce7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.781036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1203.781644] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1203.783049] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13fc363d-8ea3-4d6e-99f1-92e14379ea7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.802648] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f805e3f-639a-4452-b23e-a6184c5dbd3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.830594] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfiguring VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1203.831699] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9fe7faa-3dd0-4548-b513-741da9b939d4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.851768] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1203.851768] env[69475]: value = "task-3509128" [ 1203.851768] env[69475]: _type = "Task" [ 1203.851768] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.865662] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.219535] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1204.247539] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509127, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.358690] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121da034-3681-44bc-9f3d-d660b6868805 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.368254] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.371331] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bb1b1bd-63a4-41d3-9c55-744476f9a9c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.405642] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560f91aa-2a55-4ca4-aacc-f1d918a698b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.414265] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f74c598-dd57-480c-bf88-735cf7bd97eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.428365] env[69475]: DEBUG nova.compute.provider_tree [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1204.740398] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1204.745053] env[69475]: DEBUG oslo_vmware.api [None 
req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509127, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.718348} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.745053] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] da3eff39-b80b-4574-9b07-df6f679a9f38/da3eff39-b80b-4574-9b07-df6f679a9f38.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1204.745053] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1204.745053] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6b58ad8e-7f1d-4ed9-b089-7db24cf81c32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.752542] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1204.752542] env[69475]: value = "task-3509130" [ 1204.752542] env[69475]: _type = "Task" [ 1204.752542] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.760606] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509130, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.865316] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.931787] env[69475]: DEBUG nova.scheduler.client.report [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1205.262882] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509130, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065512} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.263236] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1205.263924] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66ecd6d5-ccd4-49f8-919f-02329fb5637f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.286301] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] da3eff39-b80b-4574-9b07-df6f679a9f38/da3eff39-b80b-4574-9b07-df6f679a9f38.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1205.286536] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b42f4681-f4ff-4657-83b6-02c656002680 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.306189] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1205.306189] env[69475]: value = "task-3509131" [ 1205.306189] env[69475]: _type = "Task" [ 1205.306189] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.313994] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509131, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.362927] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.436508] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1205.437187] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1205.439947] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.297s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1205.818968] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509131, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.866596] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.943959] env[69475]: DEBUG nova.compute.utils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1205.946016] env[69475]: DEBUG nova.objects.instance [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'migration_context' on Instance uuid 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1205.947705] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1205.947888] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1205.996279] env[69475]: DEBUG nova.policy [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a123051be3624b50ab42a4254f687767', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ca5098b4aae94c08b3f8ffd66aae2e2c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1206.317826] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509131, 'name': ReconfigVM_Task, 'duration_secs': 0.648678} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.318311] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfigured VM instance instance-00000079 to attach disk [datastore1] da3eff39-b80b-4574-9b07-df6f679a9f38/da3eff39-b80b-4574-9b07-df6f679a9f38.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1206.319019] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fdc6c669-5bb4-427a-b66a-4d9ed5286dee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.326350] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1206.326350] env[69475]: value = "task-3509132" [ 1206.326350] env[69475]: _type = "Task" [ 1206.326350] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.334941] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509132, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.335718] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Successfully created port: 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1206.366153] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.448719] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1206.647181] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20edbda-49c7-4eb2-9a52-23b323ae6d1b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.660400] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a716fd0f-61e6-4943-ae16-fe56f1186c58 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.693984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a292fcc-9b05-4751-b5da-88115d8375ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.702393] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70811d7-92a7-47c3-b185-3caa6c12b4b8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.716419] env[69475]: DEBUG nova.compute.provider_tree [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1206.837179] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509132, 'name': Rename_Task, 'duration_secs': 0.150301} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.837475] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1206.837703] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1f9b3b32-f846-40da-9d36-e352747500e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.844344] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1206.844344] env[69475]: value = "task-3509133" [ 1206.844344] env[69475]: _type = "Task" [ 1206.844344] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.852195] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509133, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.865262] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.219404] env[69475]: DEBUG nova.scheduler.client.report [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1207.356815] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509133, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.365840] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.462896] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1207.490298] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1207.490555] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1207.490710] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1207.490889] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1207.491071] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1207.491232] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1207.491443] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:583}} [ 1207.491600] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1207.491765] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1207.491926] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1207.492116] env[69475]: DEBUG nova.virt.hardware [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1207.492968] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3696a048-0b7f-49ea-b874-c3b2ec9b1a1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.502037] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c90ba89-5642-4f0c-a4be-9b6968536b7a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.773817] env[69475]: DEBUG nova.compute.manager [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.774382] env[69475]: DEBUG oslo_concurrency.lockutils [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1207.775125] env[69475]: DEBUG oslo_concurrency.lockutils [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1207.775125] env[69475]: DEBUG oslo_concurrency.lockutils [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1207.775336] env[69475]: DEBUG 
nova.compute.manager [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] No waiting events found dispatching network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1207.775465] env[69475]: WARNING nova.compute.manager [req-b9f686f7-8c83-488c-9be8-e3b02c62b207 req-1dce6304-def5-437c-af4c-0404d15f1a6d service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received unexpected event network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 for instance with vm_state building and task_state spawning. [ 1207.855588] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509133, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.866696] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.876272] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Successfully updated port: 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1208.232079] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.791s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.239455] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.499s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1208.241820] env[69475]: INFO nova.compute.claims [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1208.354852] env[69475]: DEBUG oslo_vmware.api [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509133, 'name': PowerOnVM_Task, 'duration_secs': 1.090158} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.355124] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1208.355324] env[69475]: INFO nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Took 8.50 seconds to spawn the instance on the hypervisor. [ 1208.355502] env[69475]: DEBUG nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1208.356291] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77699db-eefb-4669-a09c-87768c7ad5b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.371389] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.379603] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1208.379949] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1208.379949] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1208.870619] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.884067] env[69475]: INFO nova.compute.manager [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Took 13.45 seconds to build instance. 
[ 1208.948956] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1209.321880] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.322141] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.322365] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1209.322561] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1209.322795] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.326331] env[69475]: INFO nova.compute.manager [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Terminating instance [ 1209.370395] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.387053] env[69475]: DEBUG oslo_concurrency.lockutils [None req-fd65b1b9-a449-4889-bc21-6ce8c50381bb tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.968s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1209.397915] env[69475]: DEBUG nova.network.neutron [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1209.439315] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-493764fe-8e2d-40d1-8cf7-00b409fddd9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.448627] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572b61be-a72d-4c43-8a12-51df4d51abe0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.489952] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19ecbcc1-9c04-4d46-9426-8638295738e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.498847] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c9ad0c-be00-4885-8df7-66a0eec0792d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.512878] env[69475]: DEBUG nova.compute.provider_tree [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da 
{{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1209.779205] env[69475]: INFO nova.compute.manager [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Swapping old allocation on dict_keys(['dd221100-68c1-4a75-92b5-b24d81fee5da']) held by migration 64adaee0-7956-4547-b9fa-ad36031552dd for instance [ 1209.805316] env[69475]: DEBUG nova.scheduler.client.report [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Overwriting current allocation {'allocations': {'dd221100-68c1-4a75-92b5-b24d81fee5da': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 175}}, 'project_id': 'de2b24bdabce45a7884bdce4ed781c79', 'user_id': 'bb93c2f0a3554be8b25cde370a4083ac', 'consumer_generation': 1} on consumer 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 {{(pid=69475) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1209.810238] env[69475]: DEBUG nova.compute.manager [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-changed-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1209.810476] env[69475]: DEBUG nova.compute.manager [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing instance network info cache due to event network-changed-530ddca5-14b1-40c3-912c-998398a229c1. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1209.810730] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.830641] env[69475]: DEBUG nova.compute.manager [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1209.830855] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1209.831759] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00cb7c36-0cd7-42e2-bc94-7314e247b7f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.840137] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1209.840386] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9035419a-a8aa-446f-b0f9-46d778243ef9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.848999] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1209.848999] env[69475]: value = "task-3509134" [ 1209.848999] env[69475]: _type = "Task" [ 1209.848999] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.861065] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509134, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.869927] env[69475]: DEBUG oslo_vmware.api [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509128, 'name': ReconfigVM_Task, 'duration_secs': 5.757817} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.870229] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.870504] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Reconfigured VM to detach interface {{(pid=69475) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1209.889055] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1209.889243] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.889442] env[69475]: DEBUG nova.network.neutron [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1209.901582] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.901913] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance network_info: |[{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1209.902476] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1209.902686] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1209.903970] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:c6:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '530ddca5-14b1-40c3-912c-998398a229c1', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1209.911767] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1209.913025] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1209.913133] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34965f65-ec1a-4a71-a05e-215679ff90b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.938654] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1209.938654] env[69475]: value = "task-3509135" [ 1209.938654] env[69475]: _type = "Task" [ 1209.938654] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.950559] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509135, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.016255] env[69475]: DEBUG nova.scheduler.client.report [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1210.360906] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509134, 'name': PowerOffVM_Task, 'duration_secs': 0.25209} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.361274] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1210.361474] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1210.361743] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3e54b646-6617-4d1f-8cfd-8d8006eb61c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.428616] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1210.429037] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1210.429037] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleting the datastore file [datastore1] f8a82046-4589-45d2-a7a3-466fe4d8f9c6 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1210.429784] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-4d7a29a9-86a6-4399-af6d-be0e40e3471f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.437090] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1210.437090] env[69475]: value = "task-3509137" [ 1210.437090] env[69475]: _type = "Task" [ 1210.437090] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.449981] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509137, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.456089] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509135, 'name': CreateVM_Task, 'duration_secs': 0.32399} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.456632] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1210.459140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.459275] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.460054] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1210.460054] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5a9e8f2-f4ca-4454-a13d-8d3c998089da {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.465185] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1210.465185] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a49542-02da-2c78-d80d-91de41ba40c9" [ 1210.465185] env[69475]: _type = "Task" [ 1210.465185] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.475146] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a49542-02da-2c78-d80d-91de41ba40c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.522222] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1210.522695] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1210.605278] env[69475]: DEBUG nova.network.neutron [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [{"id": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "address": "fa:16:3e:cd:15:12", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b51cc5d-6e", "ovs_interfaceid": "5b51cc5d-6e38-423f-8f69-13541ea8a317", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.667550] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updated VIF entry in instance network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1210.668088] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1210.948378] env[69475]: DEBUG oslo_vmware.api [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509137, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201475} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.948584] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1210.948770] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1210.948953] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1210.949161] env[69475]: INFO nova.compute.manager [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Took 1.12 seconds to destroy the instance on the hypervisor. 
[ 1210.949399] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1210.949595] env[69475]: DEBUG nova.compute.manager [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1210.949690] env[69475]: DEBUG nova.network.neutron [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1210.975918] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a49542-02da-2c78-d80d-91de41ba40c9, 'name': SearchDatastore_Task, 'duration_secs': 0.020401} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.975918] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1210.976211] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1210.976393] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1210.976545] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1210.976722] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1210.976995] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-67adea3c-601b-4240-aea3-75648d34d686 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.986657] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1210.986831] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1210.987581] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8066849-9ba3-4666-adc6-62f60a89480b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.995256] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1210.995256] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523660fa-3c9b-03e8-c1c7-ae2a4fc87d78" [ 1210.995256] env[69475]: _type = "Task" [ 1210.995256] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.003561] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523660fa-3c9b-03e8-c1c7-ae2a4fc87d78, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.028434] env[69475]: DEBUG nova.compute.utils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1211.029899] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1211.030080] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1211.066415] env[69475]: DEBUG nova.policy [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba09f56e4fda4fc99602796a0af6cb33', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e87670cfd2b848af98507a5ebf9fab51', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1211.108270] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.108702] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1211.109780] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3011f7dc-8e31-4322-b28e-407bf6eca49f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.119989] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1211.119989] env[69475]: value = "task-3509138" [ 1211.119989] env[69475]: _type = "Task" [ 1211.119989] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.131058] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509138, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.171023] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1211.171373] env[69475]: DEBUG nova.compute.manager [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Received event network-changed-24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.171562] env[69475]: DEBUG nova.compute.manager [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Refreshing instance network info cache due to event network-changed-24283fcb-3bd1-46b1-a7e5-bf792688cc87. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1211.171804] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Acquiring lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.171951] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Acquired lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.172124] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Refreshing network info cache for port 24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1211.205091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.205299] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquired lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.205480] env[69475]: DEBUG nova.network.neutron [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1211.370184] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 
319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Successfully created port: 789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1211.507782] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523660fa-3c9b-03e8-c1c7-ae2a4fc87d78, 'name': SearchDatastore_Task, 'duration_secs': 0.019291} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.508662] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d2274fd-d8c4-4c6f-a5a6-00b1c0378fba {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.514531] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1211.514531] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268204c-f4cf-ce1e-5e82-d8e011cdcf1e" [ 1211.514531] env[69475]: _type = "Task" [ 1211.514531] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.524190] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268204c-f4cf-ce1e-5e82-d8e011cdcf1e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.533249] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1211.631516] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509138, 'name': PowerOffVM_Task, 'duration_secs': 0.329404} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.631786] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1211.632517] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1211.632739] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1211.632894] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1211.633090] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1211.633665] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1211.633665] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1211.633665] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1211.633820] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 
tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1211.633952] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1211.634183] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1211.634400] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1211.639983] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1718635-e849-41e8-8506-c612822e04cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.657711] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1211.657711] env[69475]: value = "task-3509139" [ 1211.657711] env[69475]: _type = "Task" [ 1211.657711] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.667041] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509139, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.723160] env[69475]: DEBUG nova.network.neutron [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1211.850785] env[69475]: DEBUG nova.compute.manager [req-07edd1cc-dcca-4900-a0b4-6a68fca01578 req-2f03bb26-134b-482d-ad7a-89fff71969b9 service nova] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Received event network-vif-deleted-24efd80f-72cd-4c40-962a-103b1ca55a1f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1211.851493] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.851747] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.851899] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.852258] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.852531] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.854669] env[69475]: INFO nova.compute.manager [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Terminating instance [ 1212.025461] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': 
session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5268204c-f4cf-ce1e-5e82-d8e011cdcf1e, 'name': SearchDatastore_Task, 'duration_secs': 0.017532} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.025690] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.025969] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1212.026248] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-769dbbd6-6b45-4d5a-80d1-9dd7fa456c1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.034274] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1212.034274] env[69475]: value = "task-3509140" [ 1212.034274] env[69475]: _type = "Task" [ 1212.034274] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.038732] env[69475]: INFO nova.virt.block_device [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Booting with volume 1a0e6d06-d9be-4cb5-8898-b91d8b1fff30 at /dev/sda [ 1212.045600] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509140, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.077829] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1d11ad3-cc6a-4a26-9bbc-1ac5c75bcb1d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.088216] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171cc7e0-1be0-4e1c-b6ca-2b6aec935f86 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.099302] env[69475]: INFO nova.network.neutron [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Port 1ef95bec-a8fb-4ee7-b99a-299bf62af225 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1212.099670] env[69475]: DEBUG nova.network.neutron [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [{"id": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "address": "fa:16:3e:41:b4:2a", "network": {"id": "801aee55-f715-4cdf-b89c-184ca3f24866", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1512419082-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b2ba1a4125454d39bc92b6123447d98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4954661-ff70-43dd-bc60-8cbca6b9cbfa", "external-id": "nsx-vlan-transportzone-294", "segmentation_id": 294, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e3f5a22-9b", "ovs_interfaceid": "6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.101358] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updated VIF entry in instance network info cache for port 24283fcb-3bd1-46b1-a7e5-bf792688cc87. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1212.101626] env[69475]: DEBUG nova.network.neutron [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updating instance_info_cache with network_info: [{"id": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "address": "fa:16:3e:54:61:67", "network": {"id": "b825e883-3197-4a41-a94b-f363fe49fc0d", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1376961670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "67d27343d8c04fc9a2bed7a764f6cf82", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "94926d5b-bfab-4c04-85b5-0fe89934c8ff", "external-id": "nsx-vlan-transportzone-157", "segmentation_id": 157, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap24283fcb-3b", "ovs_interfaceid": "24283fcb-3bd1-46b1-a7e5-bf792688cc87", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.126347] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-45f77256-c545-4660-8953-0ea7f429e5b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.136569] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247c1d20-bdea-457c-9c88-78ec9dd43b1c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.176248] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5763a8e0-9978-40df-87ee-3cce43299c47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.178838] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509139, 'name': ReconfigVM_Task, 'duration_secs': 0.361784} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.180022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9944cfa3-96a2-4c96-9109-66e404149977 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.185338] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e5c7f0e-e927-4df5-aae6-53ea00e030a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.201808] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1212.202074] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1212.202239] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1212.202422] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1212.202569] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1212.202717] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1212.202920] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:583}} [ 1212.203094] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1212.203270] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1212.203436] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1212.203610] env[69475]: DEBUG nova.virt.hardware [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1212.204848] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4e42c66b-a05c-4146-aa4e-523bc46d4071 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.210819] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1212.210819] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248bdfd-4184-f8f5-296c-a5644693cbd0" [ 1212.210819] env[69475]: _type = "Task" [ 1212.210819] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.218879] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248bdfd-4184-f8f5-296c-a5644693cbd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.222475] env[69475]: DEBUG nova.virt.block_device [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating existing volume attachment record: 2bf59737-d0b9-4f82-968e-b1ecb28318c3 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1212.227972] env[69475]: INFO nova.compute.manager [-] [instance: f8a82046-4589-45d2-a7a3-466fe4d8f9c6] Took 1.28 seconds to deallocate network for instance. [ 1212.359257] env[69475]: DEBUG nova.compute.manager [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1212.359257] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.360135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e140396b-a7bc-42a3-a325-ce418aa3180a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.370022] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.370022] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f10e302a-aa3a-42de-86da-fcb8b2e587ad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.377058] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1212.377058] env[69475]: value = "task-3509141" [ 1212.377058] env[69475]: _type = "Task" [ 1212.377058] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.385966] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509141, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.546491] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509140, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.603569] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Releasing lock "refresh_cache-eadfee29-c7fc-4d33-8869-7ea8e753554c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.606348] env[69475]: DEBUG oslo_concurrency.lockutils [req-df1c48c1-3370-4dc7-838c-60311c9aa133 req-3e90d48c-fefd-4b55-9c04-268e1f4b8771 service nova] Releasing lock "refresh_cache-da3eff39-b80b-4574-9b07-df6f679a9f38" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.722991] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5248bdfd-4184-f8f5-296c-a5644693cbd0, 'name': SearchDatastore_Task, 'duration_secs': 0.010265} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.729344] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfiguring VM instance instance-00000073 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1212.730018] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-121594ee-24d8-4506-b6cd-67a2a9d8b398 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.744230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.744230] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.744727] env[69475]: DEBUG nova.objects.instance [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'resources' on Instance uuid f8a82046-4589-45d2-a7a3-466fe4d8f9c6 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1212.755196] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1212.755196] env[69475]: value = "task-3509142" [ 1212.755196] env[69475]: _type = "Task" [ 1212.755196] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.770784] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509142, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.785189] env[69475]: DEBUG nova.compute.manager [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Received event network-vif-plugged-789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1212.785504] env[69475]: DEBUG oslo_concurrency.lockutils [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1212.785748] env[69475]: DEBUG oslo_concurrency.lockutils [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1212.786209] env[69475]: DEBUG oslo_concurrency.lockutils [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.786882] env[69475]: DEBUG nova.compute.manager [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] No waiting events found dispatching network-vif-plugged-789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1212.787133] env[69475]: WARNING nova.compute.manager [req-2bbcf2ff-0ee9-43d9-84a6-885925c0a470 req-be7d539b-3a93-4b62-9cea-87db2d1484ad service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Received unexpected event network-vif-plugged-789f3dcb-d9c8-495b-b66c-896fb31e0e63 for instance with vm_state building and task_state block_device_mapping. [ 1212.890306] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509141, 'name': PowerOffVM_Task, 'duration_secs': 0.23618} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.890306] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1212.890306] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1212.890306] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-70f33dbb-c6a8-4275-bba6-e4e93c790aea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.892615] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Successfully updated port: 789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1212.963236] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1212.963236] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1212.963476] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleting the datastore file [datastore2] eadfee29-c7fc-4d33-8869-7ea8e753554c {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.963649] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e7cac4ad-d8a9-4f03-858a-801464dbeb9b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.975626] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1212.975626] env[69475]: value = "task-3509144" [ 1212.975626] env[69475]: _type = "Task" [ 1212.975626] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.986692] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.046236] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509140, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.817255} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.046796] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1213.046983] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1213.047460] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2fe424e1-8222-4e02-9d7e-4e2b60b42e7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.054966] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1213.054966] env[69475]: value = "task-3509145" [ 1213.054966] env[69475]: _type = "Task" [ 1213.054966] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.063038] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509145, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.108062] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3a8fd701-887e-4825-8a78-e49abff62b7b tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "interface-eadfee29-c7fc-4d33-8869-7ea8e753554c-1ef95bec-a8fb-4ee7-b99a-299bf62af225" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.831s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.265474] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509142, 'name': ReconfigVM_Task, 'duration_secs': 0.428081} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.265624] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfigured VM instance instance-00000073 to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1213.266454] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e43ad94-61c8-49de-8e44-598d6c5d2b33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.293283] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.295808] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b046034c-1842-4bf9-ab8d-72246a043894 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.317000] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1213.317000] env[69475]: value = "task-3509146" [ 1213.317000] env[69475]: _type = "Task" [ 1213.317000] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.341426] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509146, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.395383] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1213.395501] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1213.396651] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1213.441544] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62623c37-4b2d-4a51-b7ab-fb3328b469ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.449296] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6086902c-afb3-4257-9d3b-3aeb11191329 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.479760] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.480022] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.480221] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1213.480401] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1213.480567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1213.482882] env[69475]: INFO nova.compute.manager [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Terminating instance [ 1213.487458] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aec4e939-a6d3-4a5d-8314-9f99fdc5d979 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.497146] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509144, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.500765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffad43f7-1c70-47ee-bab2-4ba2dfc6af28 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.515350] env[69475]: DEBUG nova.compute.provider_tree [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1213.565257] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068503} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.565551] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1213.566349] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c92c97d-4d2a-4cad-b8c7-4502a591f269 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.589355] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1213.589595] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca0b2f67-abde-4287-b6e5-72f05c5946a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.611711] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1213.611711] env[69475]: value = "task-3509147" [ 1213.611711] env[69475]: _type = "Task" [ 1213.611711] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.620245] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509147, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.829242] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509146, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.928521] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1213.992640] env[69475]: DEBUG nova.compute.manager [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1213.992890] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1213.993235] env[69475]: DEBUG oslo_vmware.api [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509144, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.712651} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.994058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b36f33-d6a5-4585-adcf-d403c522d8a8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.996687] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1213.996874] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1213.997113] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1213.997324] env[69475]: INFO nova.compute.manager [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1213.997565] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1213.998157] env[69475]: DEBUG nova.compute.manager [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1213.998274] env[69475]: DEBUG nova.network.neutron [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1214.007088] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1214.007341] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e61f6f77-657a-474f-a219-5db0e14ca510 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.014700] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1214.014700] env[69475]: value = "task-3509148" [ 1214.014700] env[69475]: _type = "Task" [ 1214.014700] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.018597] env[69475]: DEBUG nova.scheduler.client.report [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1214.029130] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509148, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.074200] env[69475]: DEBUG nova.network.neutron [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.125107] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509147, 'name': ReconfigVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.327919] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509146, 'name': ReconfigVM_Task, 'duration_secs': 0.735727} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.328242] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954/0c1ee654-0d2e-40a8-b9a9-291c6a9ab954.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.329056] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8459e64a-5d51-473a-979e-6af7027f857f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.347304] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1214.347741] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1214.347950] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1214.348169] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1214.348369] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1214.348463] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1214.348610] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1214.348810] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1214.348973] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1214.349160] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies 
{{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1214.349321] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1214.349493] env[69475]: DEBUG nova.virt.hardware [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1214.350318] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af84c55c-ded9-4bf8-9a4f-d9ee772ab3eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.352984] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10cb1242-d5e0-423c-b9fb-7702c2a4a1a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.374711] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ad317db-a73d-4ead-b9f7-1bebe1c95987 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.378827] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f38998ec-0a13-416a-bc11-036d7d2952df {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.404880] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26195d7f-4390-4b38-b627-43ec9edbf4d5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.411713] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1214.411940] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-941d6e88-77db-453b-8848-09febbe4fa40 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.418179] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1214.418179] env[69475]: value = "task-3509149" [ 1214.418179] env[69475]: _type = "Task" [ 1214.418179] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.426286] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509149, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.525354] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509148, 'name': PowerOffVM_Task, 'duration_secs': 0.229137} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.525627] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1214.525794] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1214.526092] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b37ab013-8b94-4187-99b3-b03e4aa3e7e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.530499] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.787s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1214.552809] env[69475]: INFO nova.scheduler.client.report [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted allocations for instance f8a82046-4589-45d2-a7a3-466fe4d8f9c6 [ 1214.577508] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1214.577815] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Instance network_info: |[{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1214.578332] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:88:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee9ce73d-4ee8-4b28-b7d3-3a5735039627', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '789f3dcb-d9c8-495b-b66c-896fb31e0e63', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1214.588528] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1214.588528] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1214.588672] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1214.589189] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1214.589189] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleting the datastore file [datastore2] e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1214.589189] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8724dd2e-46aa-46bb-9968-8fe440b98a64 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.605073] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebc516e7-8276-4a47-be50-4d09b4ee0bda {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1214.614154] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1214.614154] env[69475]: value = "task-3509152" [ 1214.614154] env[69475]: _type = "Task" [ 1214.614154] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.618642] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for the task: (returnval){ [ 1214.618642] env[69475]: value = "task-3509151" [ 1214.618642] env[69475]: _type = "Task" [ 1214.618642] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.628786] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509147, 'name': ReconfigVM_Task, 'duration_secs': 0.534719} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.629441] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1214.630129] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1b254733-dcc6-485a-ad64-87d8a8da0959 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.637338] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509152, 'name': CreateVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.637905] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509151, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.646632] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1214.646632] env[69475]: value = "task-3509153" [ 1214.646632] env[69475]: _type = "Task" [ 1214.646632] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.668168] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509153, 'name': Rename_Task} progress is 6%. 
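[editorial note] The "Instance VIF info" entry at 1214.578332 shows the list vmops builds from the neutron port before creating the VM: an OpaqueNetwork reference backed by the NSX logical switch, plus the port's MAC and a vmxnet3 adapter. A rough sketch of that mapping follows; the helper name and the exact set of copied fields are assumptions based only on the dictionary printed in the log.

```python
def vif_info_from_port(port):
    """Map a neutron port dict (shape as logged above) to the VIF info dict
    the VMware driver appears to hand to the VM builder.  Illustrative only."""
    details = port["details"]
    return {
        "network_name": port["network"]["bridge"],          # e.g. 'br-int'
        "mac_address": port["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": port["id"],
        "vif_model": "vmxnet3",
    }

# Minimal example using the fields seen in the log entry.
port = {
    "id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63",
    "address": "fa:16:3e:b0:88:42",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627"},
}
print(vif_info_from_port(port))
```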
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.826264] env[69475]: DEBUG nova.compute.manager [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Received event network-changed-789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1214.826264] env[69475]: DEBUG nova.compute.manager [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Refreshing instance network info cache due to event network-changed-789f3dcb-d9c8-495b-b66c-896fb31e0e63. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1214.827118] env[69475]: DEBUG oslo_concurrency.lockutils [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1214.827118] env[69475]: DEBUG oslo_concurrency.lockutils [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1214.827118] env[69475]: DEBUG nova.network.neutron [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Refreshing network info cache for port 789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1214.896869] env[69475]: DEBUG nova.compute.manager [req-9d20c8b1-5b92-4527-8d64-daf4acd7e0e6 req-1660ed57-68ba-4404-b0b0-dfc30bc34af3 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Received event network-vif-deleted-6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1214.897194] env[69475]: INFO nova.compute.manager [req-9d20c8b1-5b92-4527-8d64-daf4acd7e0e6 req-1660ed57-68ba-4404-b0b0-dfc30bc34af3 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Neutron deleted interface 6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c; detaching it from the instance and deleting it from the info cache [ 1214.897291] env[69475]: DEBUG nova.network.neutron [req-9d20c8b1-5b92-4527-8d64-daf4acd7e0e6 req-1660ed57-68ba-4404-b0b0-dfc30bc34af3 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.929903] env[69475]: DEBUG oslo_vmware.api [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509149, 'name': PowerOnVM_Task, 'duration_secs': 0.500125} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1214.930090] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1215.062236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-b1af8398-b10b-450a-9147-29c3aba99e3e tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "f8a82046-4589-45d2-a7a3-466fe4d8f9c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.740s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1215.134492] env[69475]: DEBUG oslo_vmware.api [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Task: {'id': task-3509151, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188559} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.134732] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509152, 'name': CreateVM_Task, 'duration_secs': 0.357935} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.134866] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1215.135066] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1215.135248] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1215.135417] env[69475]: INFO nova.compute.manager [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1215.135649] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1215.135781] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1215.136020] env[69475]: DEBUG nova.compute.manager [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1215.136129] env[69475]: DEBUG nova.network.neutron [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1215.138075] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701152', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'name': 'volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2', 'attached_at': '', 'detached_at': '', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'serial': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30'}, 'device_type': None, 'attachment_id': '2bf59737-d0b9-4f82-968e-b1ecb28318c3', 'mount_device': '/dev/sda', 'delete_on_termination': True, 'boot_index': 0, 'guest_format': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=69475) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1215.138242] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Root volume attach. Driver type: vmdk {{(pid=69475) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1215.138979] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c287b96-4f9e-4179-8681-87bc3385ff0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.147039] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc26fe24-20b2-40f9-9987-0b35b56310ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.158633] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29eff4f4-ff40-4ce0-9fb1-75e781ea89d9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.161404] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509153, 'name': Rename_Task, 'duration_secs': 0.16664} completed successfully. 
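[editorial note] The "Block device information present" entry above describes a boot-from-volume instance: the root disk is a vmdk-backed Cinder volume carried in block_device_mapping rather than an image, which is why the spawn path goes through "Root volume attach" and a RelocateVM_Task. Below is a small, assumption-laden helper (not Nova's real code) that pulls the vmdk connection data for the boot device out of such a structure.

```python
def root_vmdk_connection(block_device_info):
    """Return the vmdk connection data for the boot device, or None.

    Walks the block_device_mapping list in the shape logged above;
    purely illustrative, not the driver's real helper.
    """
    root_dev = block_device_info.get("root_device_name")
    for bdm in block_device_info.get("block_device_mapping", []):
        conn = bdm.get("connection_info", {})
        if bdm.get("mount_device") == root_dev and conn.get("driver_volume_type") == "vmdk":
            return conn["data"]          # contains 'volume', 'volume_id', ...
    return None

bdi = {
    "root_device_name": "/dev/sda",
    "block_device_mapping": [{
        "mount_device": "/dev/sda",
        "connection_info": {
            "driver_volume_type": "vmdk",
            "data": {"volume": "vm-701152",
                     "volume_id": "1a0e6d06-d9be-4cb5-8898-b91d8b1fff30"},
        },
    }],
}
print(root_vmdk_connection(bdi))
```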
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.162264] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1215.163011] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-591e584c-8053-426a-bb08-f716ac0c5ccb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.167282] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b5713534-624a-432d-a020-995fae46d532 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.172813] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1215.172813] env[69475]: value = "task-3509154" [ 1215.172813] env[69475]: _type = "Task" [ 1215.172813] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.183867] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509154, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.185183] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1215.185183] env[69475]: value = "task-3509155" [ 1215.185183] env[69475]: _type = "Task" [ 1215.185183] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.193150] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509155, 'name': RelocateVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.242447] env[69475]: DEBUG nova.network.neutron [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.400363] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f5d34d5-b747-431a-8498-586e2d87098e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.421940] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-345713c4-a64f-411a-b1bb-b88381a4a7c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.467823] env[69475]: DEBUG nova.compute.manager [req-9d20c8b1-5b92-4527-8d64-daf4acd7e0e6 req-1660ed57-68ba-4404-b0b0-dfc30bc34af3 service nova] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Detach interface failed, port_id=6e3f5a22-9bfb-4b60-a9ac-66e0b6f4fc4c, reason: Instance eadfee29-c7fc-4d33-8869-7ea8e753554c could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1215.591967] env[69475]: DEBUG nova.network.neutron [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updated VIF entry in instance network info cache for port 789f3dcb-d9c8-495b-b66c-896fb31e0e63. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1215.592326] env[69475]: DEBUG nova.network.neutron [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1215.686120] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509154, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.698565] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509155, 'name': RelocateVM_Task} progress is 20%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.748287] env[69475]: INFO nova.compute.manager [-] [instance: eadfee29-c7fc-4d33-8869-7ea8e753554c] Took 1.75 seconds to deallocate network for instance. [ 1215.947940] env[69475]: INFO nova.compute.manager [None req-c5309dd1-df9e-48cb-b2bc-b3a1628e1b52 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance to original state: 'active' [ 1216.098012] env[69475]: DEBUG oslo_concurrency.lockutils [req-a51199a3-e442-4516-9c21-cc27022b3177 req-8f2bc2c2-93df-44ae-9635-31ed1867bedc service nova] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.124351] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "4100fb43-1dae-40b1-8caa-11dd67962274" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.124597] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.124801] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.124989] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.125179] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1216.127277] env[69475]: INFO nova.compute.manager [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Terminating instance [ 1216.157030] env[69475]: DEBUG nova.network.neutron [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1216.184296] env[69475]: DEBUG oslo_vmware.api [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509154, 'name': PowerOnVM_Task, 'duration_secs': 0.527758} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.184558] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1216.184756] env[69475]: INFO nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Took 8.72 seconds to spawn the instance on the hypervisor. [ 1216.184933] env[69475]: DEBUG nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1216.185701] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3e5dbc-f6b4-4db5-8042-044e8787ca72 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.201021] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509155, 'name': RelocateVM_Task, 'duration_secs': 0.597927} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.201021] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1216.201021] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701152', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'name': 'volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2', 'attached_at': '', 'detached_at': '', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'serial': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1216.201745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1002020-a0a8-421a-a838-231c29c4e3fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.217653] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-179599a5-060e-47d3-beee-e882f6365a6e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.242456] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30/volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1216.243476] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fb3734b-01b0-4fec-aeda-f74a51c3ce0f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.259200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1216.259483] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1216.259700] env[69475]: DEBUG nova.objects.instance [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'resources' on Instance uuid eadfee29-c7fc-4d33-8869-7ea8e753554c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
1216.268233] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1216.268233] env[69475]: value = "task-3509156" [ 1216.268233] env[69475]: _type = "Task" [ 1216.268233] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.277709] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509156, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.634032] env[69475]: DEBUG nova.compute.manager [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1216.634032] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1216.634032] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8060e2f8-2e9c-4f55-9052-dbb85c825da1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.641354] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1216.641788] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bca21aa6-024c-4d6d-9262-89e0d94e9698 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.648827] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1216.648827] env[69475]: value = "task-3509157" [ 1216.648827] env[69475]: _type = "Task" [ 1216.648827] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.658828] env[69475]: INFO nova.compute.manager [-] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Took 1.52 seconds to deallocate network for instance. [ 1216.659410] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509157, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.712672] env[69475]: INFO nova.compute.manager [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Took 14.63 seconds to build instance. [ 1216.780639] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509156, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.930375] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4329f063-b441-47c8-92bd-836a75218687 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.940842] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b09f980-e058-4b50-97b5-bfcc0bc78281 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.981420] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c2e624-06da-4b11-a3ce-c7be942ddd18 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.991822] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1845dd57-c619-4eaf-916f-eca5c03d5376 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.002290] env[69475]: DEBUG nova.compute.manager [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: e10a197a-a9b7-43ce-b8a8-ce186619feb9] Received event network-vif-deleted-e27cfabc-cd13-4aaa-b9e1-eebffb18225e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.002781] env[69475]: DEBUG nova.compute.manager [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-changed-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1217.002781] env[69475]: DEBUG nova.compute.manager [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing instance network info cache due to event network-changed-530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1217.003011] env[69475]: DEBUG oslo_concurrency.lockutils [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1217.003871] env[69475]: DEBUG oslo_concurrency.lockutils [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1217.004424] env[69475]: DEBUG nova.network.neutron [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1217.016072] env[69475]: DEBUG nova.compute.provider_tree [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.159106] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509157, 'name': PowerOffVM_Task, 'duration_secs': 0.231683} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.159381] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1217.159551] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1217.159805] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5bdc60f6-12cf-44a5-a6e8-3c9b3b167c36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.166611] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.213957] env[69475]: DEBUG oslo_concurrency.lockutils [None req-81002b39-6258-4269-b40c-8c422c5ca078 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.154s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.242778] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1217.243035] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1217.243251] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleting the datastore file [datastore1] 4100fb43-1dae-40b1-8caa-11dd67962274 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1217.243466] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f87cd42f-bfe2-41af-8ad6-99cd733a7a8a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.254414] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 
tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for the task: (returnval){ [ 1217.254414] env[69475]: value = "task-3509159" [ 1217.254414] env[69475]: _type = "Task" [ 1217.254414] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.262742] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509159, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.277269] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509156, 'name': ReconfigVM_Task, 'duration_secs': 0.696944} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.277536] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30/volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1217.282623] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f53320c-2d48-4f57-bdaf-72f1413c04ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.311580] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1217.311580] env[69475]: value = "task-3509160" [ 1217.311580] env[69475]: _type = "Task" [ 1217.311580] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.320492] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509160, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.373099] env[69475]: DEBUG nova.network.neutron [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updated VIF entry in instance network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1217.373467] env[69475]: DEBUG nova.network.neutron [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1217.521388] env[69475]: DEBUG nova.scheduler.client.report [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.765793] env[69475]: DEBUG oslo_vmware.api [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Task: {'id': task-3509159, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199575} completed successfully. 
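[editorial note] The scheduler report client above logs "Inventory has not changed for provider ..." together with the full VCPU/MEMORY_MB/DISK_GB inventory. At its core that decision is a comparison of the cached inventory against the freshly computed one before deciding whether to push an update to Placement; the sketch below is a hedged simplification (the helper name is invented, and the real client also tracks provider generations, which this ignores).

```python
def inventory_changed(cached, proposed):
    """Return True if the proposed inventory differs from the cached copy.

    Keys follow the resource classes seen in the log (VCPU, MEMORY_MB,
    DISK_GB); this sketch compares the dicts directly and nothing more.
    """
    return cached != proposed

cached = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0},
}
proposed = dict(cached)   # identical -> nothing to send
if not inventory_changed(cached, proposed):
    print("Inventory has not changed; skipping update to Placement.")
```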
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.766217] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1217.766321] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1217.766476] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1217.766655] env[69475]: INFO nova.compute.manager [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1217.766915] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1217.767144] env[69475]: DEBUG nova.compute.manager [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1217.767248] env[69475]: DEBUG nova.network.neutron [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1217.821381] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509160, 'name': ReconfigVM_Task, 'duration_secs': 0.157214} completed successfully. 
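[editorial note] The entries for instance 4100fb43 above trace the usual VMware destroy path in order: power off, unregister the VM, delete its datastore directory, then hand off to network deallocation (which the manager retries in a looping call). The compact sketch below only captures that ordering; every step is a stub and none of these function names are Nova's.

```python
def destroy_instance(vm, deallocate_network):
    """Tear down a VM in the order the log shows; every step is a stub."""
    steps = [
        ("Powering off the VM", vm.power_off),
        ("Unregistering the VM", vm.unregister),
        ("Deleting contents of the VM from datastore", vm.delete_datastore_files),
    ]
    for message, step in steps:
        print(message)
        step()
    # Network cleanup runs last, retried in a looping call in the real manager.
    print("Deallocating network for instance")
    deallocate_network()

class StubVM:
    power_off = unregister = delete_datastore_files = staticmethod(lambda: None)

destroy_instance(StubVM(), deallocate_network=lambda: None)
```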
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.821767] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701152', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'name': 'volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2', 'attached_at': '', 'detached_at': '', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'serial': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1217.822332] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84597585-8839-46b3-b2da-b7015be5c0eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.830824] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1217.830824] env[69475]: value = "task-3509161" [ 1217.830824] env[69475]: _type = "Task" [ 1217.830824] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.841378] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509161, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.878335] env[69475]: DEBUG oslo_concurrency.lockutils [req-c3dadff1-8836-491c-aa17-0af8daa72669 req-71088b67-21a6-4529-b47f-49aa75e351ee service nova] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1217.878816] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.879051] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.879253] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1217.879436] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1217.879596] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1217.881703] env[69475]: INFO nova.compute.manager [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Terminating instance [ 1218.027108] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.767s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.029541] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 
tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.863s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.029778] env[69475]: DEBUG nova.objects.instance [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lazy-loading 'resources' on Instance uuid e10a197a-a9b7-43ce-b8a8-ce186619feb9 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.051742] env[69475]: INFO nova.scheduler.client.report [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted allocations for instance eadfee29-c7fc-4d33-8869-7ea8e753554c [ 1218.341828] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509161, 'name': Rename_Task, 'duration_secs': 0.159102} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.342188] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.342540] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f33bde56-51c7-4d2e-9af7-883eacb0d917 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.350698] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1218.350698] env[69475]: value = "task-3509162" [ 1218.350698] env[69475]: _type = "Task" [ 1218.350698] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.359749] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509162, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.385743] env[69475]: DEBUG nova.compute.manager [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1218.386663] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.386997] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08df6e61-0265-4b82-b304-db509786ef46 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.396082] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.396082] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dad3ac7-ba8a-4b39-9852-56175c6a8b9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.408475] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1218.408475] env[69475]: value = "task-3509163" [ 1218.408475] env[69475]: _type = "Task" [ 1218.408475] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.418289] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509163, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.485979] env[69475]: DEBUG nova.network.neutron [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1218.559899] env[69475]: DEBUG oslo_concurrency.lockutils [None req-08a6f03b-42f2-4395-b23a-57ba65adfb71 tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "eadfee29-c7fc-4d33-8869-7ea8e753554c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.708s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.670741] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f483d27-cdd9-4d83-95fa-dd1d84ce99fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.679969] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216df97c-2eee-42a3-936c-df804d2ff53f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.712387] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2eb68e-1a03-420d-b863-5830304879e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.720968] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69dd84fd-e9ce-488c-b091-54b6c1543451 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.736048] env[69475]: DEBUG nova.compute.provider_tree [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.861752] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509162, 'name': PowerOnVM_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.919265] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509163, 'name': PowerOffVM_Task, 'duration_secs': 0.208831} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.919529] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.919707] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1218.919949] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34a6d40a-14d0-463a-a8df-4b2be49c16c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.989495] env[69475]: INFO nova.compute.manager [-] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Took 1.22 seconds to deallocate network for instance. [ 1219.030048] env[69475]: DEBUG nova.compute.manager [req-7a845a0d-6006-412c-8bab-4b0346496811 req-dac13b33-dc16-4d40-adbb-2a0afcc6a4e1 service nova] [instance: 4100fb43-1dae-40b1-8caa-11dd67962274] Received event network-vif-deleted-72e7aa25-953c-4253-8e6e-6543fd67af89 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1219.239635] env[69475]: DEBUG nova.scheduler.client.report [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.362965] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509162, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.496830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.699443] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.699712] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.699922] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.700120] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.700293] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.702427] env[69475]: INFO nova.compute.manager [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Terminating instance [ 1219.744614] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.747036] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.250s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.747337] env[69475]: DEBUG nova.objects.instance [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lazy-loading 'resources' on Instance uuid 4100fb43-1dae-40b1-8caa-11dd67962274 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1219.768663] env[69475]: INFO nova.scheduler.client.report [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Deleted allocations for instance e10a197a-a9b7-43ce-b8a8-ce186619feb9 [ 1219.862449] env[69475]: DEBUG oslo_vmware.api [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509162, 'name': PowerOnVM_Task, 'duration_secs': 1.023832} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.862787] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1219.862886] env[69475]: INFO nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Took 5.52 seconds to spawn the instance on the hypervisor. [ 1219.863082] env[69475]: DEBUG nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1219.863895] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce634fb6-6a82-41d5-bf28-7fe5ab64cef2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.206472] env[69475]: DEBUG nova.compute.manager [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1220.206690] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1220.207629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50655113-21b8-468a-a23d-6bb15409195b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.217153] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1220.217385] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-786a814c-bb08-424a-9e1e-daccc8872fad {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.225134] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1220.225134] env[69475]: value = "task-3509165" [ 1220.225134] env[69475]: _type = "Task" [ 1220.225134] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.233385] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509165, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.276594] env[69475]: DEBUG oslo_concurrency.lockutils [None req-772e6357-dac3-4aa0-a045-b92f30509ab1 tempest-AttachVolumeShelveTestJSON-386882206 tempest-AttachVolumeShelveTestJSON-386882206-project-member] Lock "e10a197a-a9b7-43ce-b8a8-ce186619feb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.796s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.380185] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6997911-8d3a-425c-b8c7-70afbc6784eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.385950] env[69475]: INFO nova.compute.manager [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Took 15.66 seconds to build instance. 
[ 1220.390978] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9e62b7-989f-46e9-9fe5-8ffb18029546 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.424829] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4c63d32-8bd8-4211-a431-cf8c90fdd213 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.433745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117cf068-c023-4c1e-af56-e6e878325d35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.448171] env[69475]: DEBUG nova.compute.provider_tree [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1220.475355] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.475736] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.476171] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleting the datastore file [datastore1] 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.476528] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d02e4145-60fc-4722-a624-27cb93bb31a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.483852] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1220.483852] env[69475]: value = "task-3509166" [ 1220.483852] env[69475]: _type = "Task" [ 1220.483852] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.492653] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509166, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.735123] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509165, 'name': PowerOffVM_Task, 'duration_secs': 0.256276} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.735403] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1220.735573] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1220.735823] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a457172d-8a94-42c9-a05e-6a05a4244ddd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.813758] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1220.814274] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1220.814516] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleting the datastore file [datastore2] 579b4d3e-bd76-4f5d-b972-7b289bca04a0 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1220.814757] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-48f305a4-ab92-4a08-905a-01d8a8f45602 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.829527] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for the task: (returnval){ [ 1220.829527] env[69475]: value = "task-3509168" [ 1220.829527] env[69475]: _type = "Task" [ 1220.829527] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1220.843585] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1220.888337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-026a7cff-dde6-4763-803b-ecc280dc514f tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.180s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.951828] env[69475]: DEBUG nova.scheduler.client.report [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1220.994691] env[69475]: DEBUG oslo_vmware.api [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509166, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146578} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1220.994960] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1220.995169] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1220.995368] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1220.995543] env[69475]: INFO nova.compute.manager [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Took 2.61 seconds to destroy the instance on the hypervisor. 
[ 1220.995783] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1220.996021] env[69475]: DEBUG nova.compute.manager [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1220.996154] env[69475]: DEBUG nova.network.neutron [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.341706] env[69475]: DEBUG oslo_vmware.api [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Task: {'id': task-3509168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162182} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1221.341961] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1221.342212] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1221.342400] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1221.342578] env[69475]: INFO nova.compute.manager [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1221.342818] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1221.343020] env[69475]: DEBUG nova.compute.manager [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1221.343122] env[69475]: DEBUG nova.network.neutron [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1221.462056] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1221.487727] env[69475]: INFO nova.scheduler.client.report [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Deleted allocations for instance 4100fb43-1dae-40b1-8caa-11dd67962274 [ 1221.621304] env[69475]: DEBUG nova.compute.manager [req-a5ea8568-314b-45f9-a430-bb322d3c31b5 req-2dff2920-bd45-4d5c-b5de-ea1a36b4e241 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Received event network-vif-deleted-f953a932-b0a0-4620-ae5b-9a9cda24d9a4 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1221.621519] env[69475]: INFO nova.compute.manager [req-a5ea8568-314b-45f9-a430-bb322d3c31b5 req-2dff2920-bd45-4d5c-b5de-ea1a36b4e241 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Neutron deleted interface f953a932-b0a0-4620-ae5b-9a9cda24d9a4; detaching it from the instance and deleting it from the info cache [ 1221.621693] env[69475]: DEBUG nova.network.neutron [req-a5ea8568-314b-45f9-a430-bb322d3c31b5 req-2dff2920-bd45-4d5c-b5de-ea1a36b4e241 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.999487] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de0e82bf-5a54-41bb-8416-f6e6883cdd18 tempest-ServerRescueNegativeTestJSON-151899305 tempest-ServerRescueNegativeTestJSON-151899305-project-member] Lock "4100fb43-1dae-40b1-8caa-11dd67962274" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.875s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1222.098080] env[69475]: DEBUG nova.network.neutron [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.115547] env[69475]: DEBUG nova.compute.manager [req-94a8c703-fa8b-4e82-a7f2-63a77619b0ca req-637930f3-6de1-4a5d-bf98-32baa5cf6691 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Received event network-vif-deleted-5b51cc5d-6e38-423f-8f69-13541ea8a317 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.115776] env[69475]: INFO nova.compute.manager [req-94a8c703-fa8b-4e82-a7f2-63a77619b0ca req-637930f3-6de1-4a5d-bf98-32baa5cf6691 service 
nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Neutron deleted interface 5b51cc5d-6e38-423f-8f69-13541ea8a317; detaching it from the instance and deleting it from the info cache [ 1222.116029] env[69475]: DEBUG nova.network.neutron [req-94a8c703-fa8b-4e82-a7f2-63a77619b0ca req-637930f3-6de1-4a5d-bf98-32baa5cf6691 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.127630] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-824d15bb-fc85-43b3-9a21-640368f8bcdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.139032] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca27fc6-89c1-40b5-8441-242aca7a6d3d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.180783] env[69475]: DEBUG nova.compute.manager [req-a5ea8568-314b-45f9-a430-bb322d3c31b5 req-2dff2920-bd45-4d5c-b5de-ea1a36b4e241 service nova] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Detach interface failed, port_id=f953a932-b0a0-4620-ae5b-9a9cda24d9a4, reason: Instance 579b4d3e-bd76-4f5d-b972-7b289bca04a0 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1222.596604] env[69475]: DEBUG nova.network.neutron [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1222.602914] env[69475]: INFO nova.compute.manager [-] [instance: 579b4d3e-bd76-4f5d-b972-7b289bca04a0] Took 1.26 seconds to deallocate network for instance. [ 1222.623122] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1584a079-5b8b-4720-b01b-a1686e806014 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.635430] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d4a27a5-088a-4a89-8178-4d02bf08d5ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.671936] env[69475]: DEBUG nova.compute.manager [req-94a8c703-fa8b-4e82-a7f2-63a77619b0ca req-637930f3-6de1-4a5d-bf98-32baa5cf6691 service nova] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Detach interface failed, port_id=5b51cc5d-6e38-423f-8f69-13541ea8a317, reason: Instance 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1223.104031] env[69475]: INFO nova.compute.manager [-] [instance: 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954] Took 2.11 seconds to deallocate network for instance. 
[ 1223.111992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.112332] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1223.112544] env[69475]: DEBUG nova.objects.instance [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lazy-loading 'resources' on Instance uuid 579b4d3e-bd76-4f5d-b972-7b289bca04a0 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1223.610988] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1223.660228] env[69475]: DEBUG nova.compute.manager [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Stashing vm_state: active {{(pid=69475) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1223.779990] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5e415b5-50c5-4e86-a4e1-0f6f0faca0e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.787424] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b9a72d-b1d8-4670-901a-92eb4c3b910a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.822421] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4fbee06-6ef9-4f47-bfa5-81589617bfa4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.832360] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb3d67b-55c1-4290-bcce-ee3698c313cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.847414] env[69475]: DEBUG nova.compute.provider_tree [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1224.149705] env[69475]: DEBUG nova.compute.manager [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 
req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1224.149951] env[69475]: DEBUG nova.compute.manager [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing instance network info cache due to event network-changed-ed004f95-f0d0-434e-a13d-54bff688d74e. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1224.150271] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Acquiring lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1224.150484] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Acquired lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1224.150658] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Refreshing network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1224.179468] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1224.350833] env[69475]: DEBUG nova.scheduler.client.report [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1224.482843] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483115] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483286] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None 
None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483436] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483583] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483726] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.483867] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_power_states {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1224.856092] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.858569] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.248s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.858796] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1224.860884] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.682s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1224.882859] env[69475]: INFO nova.scheduler.client.report [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Deleted allocations for instance 579b4d3e-bd76-4f5d-b972-7b289bca04a0 [ 1224.887953] env[69475]: INFO nova.scheduler.client.report [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 
tempest-ServerActionsTestJSON-1088981625-project-member] Deleted allocations for instance 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 [ 1224.941472] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updated VIF entry in instance network info cache for port ed004f95-f0d0-434e-a13d-54bff688d74e. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1224.941472] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [{"id": "ed004f95-f0d0-434e-a13d-54bff688d74e", "address": "fa:16:3e:3d:0c:05", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped004f95-f0", "ovs_interfaceid": "ed004f95-f0d0-434e-a13d-54bff688d74e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1224.987971] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Getting list of instances from cluster (obj){ [ 1224.987971] env[69475]: value = "domain-c8" [ 1224.987971] env[69475]: _type = "ClusterComputeResource" [ 1224.987971] env[69475]: } {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1224.989702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed630b89-1690-42b3-8df3-2a0fec5171a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.014467] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Got total of 6 instances {{(pid=69475) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1225.014727] env[69475]: WARNING nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] While synchronizing instance power states, found 8 instances in the database and 6 instances on the hypervisor. 
[ 1225.014948] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 8d50b322-fa03-4e48-b74b-a63578e4701c {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.015265] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 579b4d3e-bd76-4f5d-b972-7b289bca04a0 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.015507] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 0c1ee654-0d2e-40a8-b9a9-291c6a9ab954 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.015744] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.015971] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid b6a785b0-7ae8-4856-b5a8-e017cfd376d8 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.016248] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid da3eff39-b80b-4574-9b07-df6f679a9f38 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.016529] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.016693] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Triggering sync for uuid 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 {{(pid=69475) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1225.017176] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "8d50b322-fa03-4e48-b74b-a63578e4701c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.017492] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.017865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.018187] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.018488] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.018746] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.019085] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.019360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.019688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.019948] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.020290] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.020558] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.020886] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.021165] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.021413] env[69475]: INFO nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] During sync_power_state the instance has a pending task (resize_prep). Skip. [ 1225.021652] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.021892] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.022157] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1225.023197] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfc679d1-1a2a-4344-b0f9-3b228e85bc13 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.027883] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a50831a-98ee-492c-9ce9-96d378881a57 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.032100] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3550fd-3c9b-4d4c-9094-4f23cc5e4b23 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.036399] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9994a672-99ae-4ef4-8636-196b988012fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.040670] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ee5931-e587-446b-b044-1e23053d9921 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.044296] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.341714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock 
"51667db3-801d-4a59-b4ee-220cbf638728" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.341952] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.342640] env[69475]: WARNING oslo_messaging._drivers.amqpdriver [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1225.366353] env[69475]: INFO nova.compute.claims [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1225.391865] env[69475]: DEBUG oslo_concurrency.lockutils [None req-833777e6-3b45-4d58-95de-1d68b796690c tempest-AttachInterfacesTestJSON-1478316462 tempest-AttachInterfacesTestJSON-1478316462-project-member] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.692s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.392791] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.375s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.393251] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c65bdbe0-d8c7-4ffa-8731-e260ba7ad80e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.398677] env[69475]: DEBUG oslo_concurrency.lockutils [None req-002aabfd-427b-405b-8c38-d27bc9eb0469 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.520s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.399547] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.381s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.400185] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-95dce4fa-cda6-4cd4-b134-434350294466 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.408084] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-190128c7-9552-430f-851e-eafa9b76446e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.422963] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8435ea-5eea-4ab2-91a1-1be370007f52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.445680] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Releasing lock "refresh_cache-8d50b322-fa03-4e48-b74b-a63578e4701c" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1225.445930] env[69475]: DEBUG nova.compute.manager [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Received event network-changed-789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1225.446146] env[69475]: DEBUG nova.compute.manager [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Refreshing instance network info cache due to event network-changed-789f3dcb-d9c8-495b-b66c-896fb31e0e63. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1225.446366] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1225.446514] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1225.446722] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Refreshing network info cache for port 789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1225.548506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.563301] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.543s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.563699] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.564016] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.564344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.545s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.570777] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.551s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.707751] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1225.708031] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1225.844644] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1225.874451] env[69475]: INFO nova.compute.resource_tracker [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating resource usage from migration 7ad5e6f9-82b2-44d2-8fef-87f39d7f662d [ 1225.949018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "579b4d3e-bd76-4f5d-b972-7b289bca04a0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.556s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1225.968500] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "0c1ee654-0d2e-40a8-b9a9-291c6a9ab954" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.569s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1226.004846] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be7280c-0fc9-4f54-9ca2-b51dc9d834ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.012751] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a78c79b-9792-4d43-b649-067d3a74a4ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.048128] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f742fdd7-6f2e-4a78-b09d-2d5c2f70d395 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.056783] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b74c92d1-91ab-4f01-a299-d94df84e9109 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.072265] env[69475]: DEBUG nova.compute.provider_tree [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1226.210497] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Starting instance... 
{{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1226.264274] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.264521] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1226.350977] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updated VIF entry in instance network info cache for port 789f3dcb-d9c8-495b-b66c-896fb31e0e63. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1226.351348] env[69475]: DEBUG nova.network.neutron [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1226.374815] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.575338] env[69475]: DEBUG nova.scheduler.client.report [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b 
tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1226.736479] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1226.770025] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1226.857203] env[69475]: DEBUG oslo_concurrency.lockutils [req-a1c166c1-3ee5-4163-a956-172b3143a7b9 req-44829ba5-4fab-4e0c-b69b-14de5ea8f707 service nova] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1227.081535] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.220s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.081535] env[69475]: INFO nova.compute.manager [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Migrating [ 1227.090696] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.542s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.090831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.091867] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1227.091867] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.717s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.093442] env[69475]: INFO nova.compute.claims [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1227.098955] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb3e975c-566c-4847-91d1-7fb1da1b44fc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.112654] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-673b6958-1f6d-4262-91c1-bb9c93cb2d98 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.134026] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2ca6ab3-723f-46d1-8534-0162aa0ab885 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.144586] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de441941-3b78-47da-881a-3bd6fba0dde6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.180134] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179106MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1227.180310] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.291710] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.606849] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.607237] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.607306] env[69475]: DEBUG nova.network.neutron [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1228.267755] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be980b1-625c-4874-8ddf-3fb2a1023f77 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.282834] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b411ed5-6dd9-49e3-b23b-0f2c9dc26794 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.315727] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4036f20b-7ea0-4a27-a58b-f253edb1c4b9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.324312] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b69dac55-e7e9-4398-a29b-2392a6a4c234 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.344024] env[69475]: DEBUG nova.compute.provider_tree [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.373892] env[69475]: DEBUG nova.network.neutron [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1228.849582] env[69475]: DEBUG nova.scheduler.client.report [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1228.876628] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1229.354622] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.263s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1229.355172] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1229.357780] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.622s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.359145] env[69475]: INFO nova.compute.claims [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1229.593847] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "2255e878-8890-46cb-a0b2-863702743691" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.594104] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1229.863371] env[69475]: DEBUG nova.compute.utils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1229.867208] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1230.096790] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1230.371215] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1230.390133] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af99bcd6-0951-4ab9-8e3b-c6ce55206299 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.421504] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 0 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1230.528861] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30ec18d1-276f-492b-9671-9eaf9b88abd5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.537819] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf4480f-6525-4777-b20b-0fb7557c6e07 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.568295] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce392582-e8b3-4890-801b-65c391d58f65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.575939] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e1bb3b5-961f-48d8-94b2-ef8ffc124e85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.589315] env[69475]: DEBUG nova.compute.provider_tree [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1230.615378] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1230.927190] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1230.927553] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf448b33-fbbd-4616-a157-266ad87909c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1230.935941] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1230.935941] env[69475]: value = 
"task-3509170" [ 1230.935941] env[69475]: _type = "Task" [ 1230.935941] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1230.945781] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509170, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.092349] env[69475]: DEBUG nova.scheduler.client.report [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1231.381454] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1231.408009] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.408330] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1231.408498] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1231.408669] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor pref 
0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1231.408812] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1231.408954] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1231.409287] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1231.409504] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1231.409719] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1231.409927] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1231.410168] env[69475]: DEBUG nova.virt.hardware [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1231.411066] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56319d22-40b7-42fb-a0dd-057c51ffaa7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.419425] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e498667-509a-4692-a917-4609ceda9c05 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.432488] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1231.437985] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 
tempest-ServerShowV247Test-297876258-project-member] Creating folder: Project (bebe57720aef441792decf9ce1ce3f05). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.438346] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eede6584-75dd-4eb7-81b9-0fe7674c0ad4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.447422] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509170, 'name': PowerOffVM_Task, 'duration_secs': 0.2336} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.447723] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1231.447953] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 17 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1231.452177] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Created folder: Project (bebe57720aef441792decf9ce1ce3f05) in parent group-v700823. [ 1231.452413] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Creating folder: Instances. Parent ref: group-v701156. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1231.452873] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6e8625ba-9ed0-4245-accb-02cac4685382 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.463580] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Created folder: Instances in parent group-v701156. [ 1231.463883] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1231.464136] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1231.464373] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-09823422-14f9-4f35-9209-4fbec5683bcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.480441] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1231.480441] env[69475]: value = "task-3509173" [ 1231.480441] env[69475]: _type = "Task" [ 1231.480441] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.488423] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509173, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.597633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.239s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1231.598053] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1231.601992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.422s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.955639] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.956059] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1231.956059] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1231.956234] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1231.956376] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1231.956525] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1231.956727] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1231.956884] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1231.957066] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1231.957264] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1231.957446] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1231.962479] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7cab30a5-49d0-4ac1-8480-9122c1bd6521 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.979803] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1231.979803] env[69475]: value = "task-3509174" [ 1231.979803] env[69475]: _type = "Task" [ 1231.979803] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1231.991412] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509174, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1231.994346] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509173, 'name': CreateVM_Task, 'duration_secs': 0.242241} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1231.994504] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1231.994897] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1231.995070] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1231.995419] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1231.995660] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-745f0d51-550d-48b4-a841-b4d2fb406bbd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.000314] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1232.000314] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e3ba5-edaa-ecc9-a249-8a68b0fcaeec" [ 1232.000314] env[69475]: _type = "Task" [ 1232.000314] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.009216] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e3ba5-edaa-ecc9-a249-8a68b0fcaeec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.106181] env[69475]: DEBUG nova.compute.utils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1232.111780] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Not allocating networking since 'none' was specified. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1232.491561] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509174, 'name': ReconfigVM_Task, 'duration_secs': 0.452156} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.491778] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 33 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1232.510510] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]526e3ba5-edaa-ecc9-a249-8a68b0fcaeec, 'name': SearchDatastore_Task, 'duration_secs': 0.011462} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1232.510801] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1232.511030] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1232.511276] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.511421] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1232.511599] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1232.511842] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16b67f90-b03b-4953-9b30-76359c9a2e2c {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.524914] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1232.525111] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1232.525789] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fcf022b-7b3d-4b3f-b7d4-e91a0b04aa36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.531776] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1232.531776] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d744a1-1373-b0d0-a90d-6f6ea94cb911" [ 1232.531776] env[69475]: _type = "Task" [ 1232.531776] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1232.540074] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d744a1-1373-b0d0-a90d-6f6ea94cb911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1232.613207] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1232.616249] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Applying migration context for instance 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 as it has an incoming, in-progress migration 7ad5e6f9-82b2-44d2-8fef-87f39d7f662d. Migration status is migrating {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1046}} [ 1232.617387] env[69475]: INFO nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating resource usage from migration 7ad5e6f9-82b2-44d2-8fef-87f39d7f662d [ 1232.633047] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.633204] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d63ddc35-06b3-43a2-bdd5-a91cf4047a4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.633334] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b6a785b0-7ae8-4856-b5a8-e017cfd376d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.633461] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance da3eff39-b80b-4574-9b07-df6f679a9f38 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.633664] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.633822] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Migration 7ad5e6f9-82b2-44d2-8fef-87f39d7f662d is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1742}} [ 1232.633953] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.634100] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 51667db3-801d-4a59-b4ee-220cbf638728 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.634274] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance cdd3f5d9-c980-41ff-92b4-14948ee00631 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1232.998460] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1232.998838] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1232.998838] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1232.999017] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1232.999173] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1232.999322] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1232.999526] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1232.999685] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1232.999847] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Got 1 possible 
topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1233.000015] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1233.000190] env[69475]: DEBUG nova.virt.hardware [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1233.005590] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1233.005992] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9b77da5-9190-4b95-80f5-e1e1eb75d142 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.026044] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1233.026044] env[69475]: value = "task-3509175" [ 1233.026044] env[69475]: _type = "Task" [ 1233.026044] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.034480] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509175, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.042626] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d744a1-1373-b0d0-a90d-6f6ea94cb911, 'name': SearchDatastore_Task, 'duration_secs': 0.046319} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.043386] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d1c0b48-3dc5-46e3-a846-3d1db99bf1dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.049879] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1233.049879] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f043e-d7a0-4890-ae51-5fd24afb882d" [ 1233.049879] env[69475]: _type = "Task" [ 1233.049879] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.057985] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f043e-d7a0-4890-ae51-5fd24afb882d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.137180] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance edec6d3e-1881-4d6a-9e0f-c9a177e334ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1233.536414] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509175, 'name': ReconfigVM_Task, 'duration_secs': 0.159073} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.536637] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1233.537394] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fe721e-d542-405e-819d-694e2f5895a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.559382] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30/volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1233.562094] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea7da123-e6e5-4bb0-ac8e-1abde7c697ea {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.580191] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]520f043e-d7a0-4890-ae51-5fd24afb882d, 'name': SearchDatastore_Task, 'duration_secs': 0.010904} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.581341] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1233.581603] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 51667db3-801d-4a59-b4ee-220cbf638728/51667db3-801d-4a59-b4ee-220cbf638728.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1233.581916] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1233.581916] env[69475]: value = "task-3509176" [ 1233.581916] env[69475]: _type = "Task" [ 1233.581916] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.582127] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-465076be-393a-4c9c-93e9-d444cd029e6c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.594279] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509176, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.595440] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1233.595440] env[69475]: value = "task-3509177" [ 1233.595440] env[69475]: _type = "Task" [ 1233.595440] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.603142] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509177, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.626620] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1233.641608] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2255e878-8890-46cb-a0b2-863702743691 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1794}} [ 1233.641860] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1233.642015] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2304MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1233.650882] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1233.651243] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1233.651453] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1233.651668] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1233.651871] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1233.652085] env[69475]: DEBUG 
nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1233.652333] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1233.652516] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1233.652719] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1233.652901] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1233.653109] env[69475]: DEBUG nova.virt.hardware [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1233.653989] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54664a16-d5b3-4875-85b2-1238c1662966 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.663064] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a733437-854d-4259-9be3-bb174d0471f3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.677875] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1233.683674] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1233.686348] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1233.686754] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-832e2306-a6c2-45ef-800e-b5ff0fec6094 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.707062] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1233.707062] env[69475]: value = "task-3509178" [ 1233.707062] env[69475]: _type = "Task" [ 1233.707062] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.717798] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509178, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.806540] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-566c2414-3169-46bd-8081-1bbf9fce7516 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.815705] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef2a00e7-a755-472e-bdec-9303fac30904 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.847765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83a15e5a-a35c-4807-9665-e11f18bc6d53 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.858524] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e52747-3c93-49ec-9acb-94b249d03b02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.877923] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1234.095605] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509176, 'name': ReconfigVM_Task, 'duration_secs': 0.28068} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.095983] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30/volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1234.096226] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 50 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1234.107498] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509177, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465066} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.107726] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 51667db3-801d-4a59-b4ee-220cbf638728/51667db3-801d-4a59-b4ee-220cbf638728.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1234.107931] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1234.108186] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22ba4f83-763f-4731-b5e3-9e1e913c3c42 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.114665] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1234.114665] env[69475]: value = "task-3509179" [ 1234.114665] env[69475]: _type = "Task" [ 1234.114665] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.122054] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.217790] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509178, 'name': CreateVM_Task} progress is 99%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.381109] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1234.605014] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8b9d17-1817-4d26-b583-2ac9949935ae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.627204] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5120adf8-c2b2-4d84-b817-69c596384c56 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.635333] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.102309} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.647884] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1234.648240] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 67 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1234.652058] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-610cc0f5-561c-41a3-9d5e-f022427a1b65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.674053] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 51667db3-801d-4a59-b4ee-220cbf638728/51667db3-801d-4a59-b4ee-220cbf638728.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1234.674335] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef5f9054-1a27-4735-9d02-8dfc129d0f72 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.696171] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1234.696171] env[69475]: value = "task-3509180" [ 1234.696171] env[69475]: _type = "Task" [ 1234.696171] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.706762] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509180, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.716621] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509178, 'name': CreateVM_Task, 'duration_secs': 0.559051} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.716814] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1234.717200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.717379] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1234.717693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1234.717937] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc2a3594-0c20-4620-824c-bfe09007fe2e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.723280] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1234.723280] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52abddd0-4d3f-261e-d144-ab11ca125ae0" [ 1234.723280] env[69475]: _type = "Task" [ 1234.723280] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.734054] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52abddd0-4d3f-261e-d144-ab11ca125ae0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.885818] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1234.885996] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.284s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1234.886280] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.595s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1234.887860] env[69475]: INFO nova.compute.claims [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1235.207063] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509180, 'name': ReconfigVM_Task, 'duration_secs': 0.330979} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.207063] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 51667db3-801d-4a59-b4ee-220cbf638728/51667db3-801d-4a59-b4ee-220cbf638728.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1235.207481] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-03ec683a-e498-4c00-948e-dcecfa3a9fb7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.215014] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1235.215014] env[69475]: value = "task-3509181" [ 1235.215014] env[69475]: _type = "Task" [ 1235.215014] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.223216] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509181, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.232267] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52abddd0-4d3f-261e-d144-ab11ca125ae0, 'name': SearchDatastore_Task, 'duration_secs': 0.010586} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.232542] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.232772] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1235.233010] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1235.233166] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1235.233377] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1235.233636] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2906aac5-70f5-4587-8d3b-1e98f7c8c22c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.244475] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1235.244696] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1235.245405] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e995b0f4-4888-46da-b4ba-64fa129b455c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.251442] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1235.251442] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5244e54d-0276-9b4b-468f-178d32a6ca8f" [ 1235.251442] env[69475]: _type = "Task" [ 1235.251442] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.260202] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5244e54d-0276-9b4b-468f-178d32a6ca8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.725113] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509181, 'name': Rename_Task, 'duration_secs': 0.139491} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.725438] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1235.725679] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54dac253-d978-43eb-b297-e59300511dc8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.733151] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1235.733151] env[69475]: value = "task-3509182" [ 1235.733151] env[69475]: _type = "Task" [ 1235.733151] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.741575] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509182, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.762869] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5244e54d-0276-9b4b-468f-178d32a6ca8f, 'name': SearchDatastore_Task, 'duration_secs': 0.010242} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.763663] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-388e8340-929d-4879-ba49-5aa777e71654 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.770528] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1235.770528] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b4e4b-e2a4-733d-6356-dd10ebaec4c9" [ 1235.770528] env[69475]: _type = "Task" [ 1235.770528] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.781618] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b4e4b-e2a4-733d-6356-dd10ebaec4c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.031422] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667e834f-9f3f-4c0c-acad-e19c140dec94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.039678] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e380da5-dbcd-47f8-9605-8f014b249c95 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.071080] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c31d501-9f37-4b84-84aa-1a539a4852cc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.079574] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79cfa9d6-31fb-4fa6-96b8-935fcb3836d7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.093667] env[69475]: DEBUG nova.compute.provider_tree [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.243788] env[69475]: DEBUG oslo_vmware.api [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509182, 'name': PowerOnVM_Task, 'duration_secs': 0.456842} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.244225] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1236.244304] env[69475]: INFO nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Took 4.86 seconds to spawn the instance on the hypervisor. [ 1236.244481] env[69475]: DEBUG nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1236.245238] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07fc44fc-930f-4b03-8167-be05953fd920 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.280606] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]524b4e4b-e2a4-733d-6356-dd10ebaec4c9, 'name': SearchDatastore_Task, 'duration_secs': 0.011825} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.280858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1236.281128] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1236.281398] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b952b6d7-bfd0-413c-8338-4c946bdf64fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.288922] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1236.288922] env[69475]: value = "task-3509183" [ 1236.288922] env[69475]: _type = "Task" [ 1236.288922] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.293640] env[69475]: DEBUG nova.network.neutron [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Port 789f3dcb-d9c8-495b-b66c-896fb31e0e63 binding to destination host cpu-1 is already ACTIVE {{(pid=69475) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1236.298509] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509183, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.596796] env[69475]: DEBUG nova.scheduler.client.report [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1236.762285] env[69475]: INFO nova.compute.manager [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Took 10.41 seconds to build instance. [ 1236.799119] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509183, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.43672} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.802905] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1236.803144] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1236.803681] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22ed86d3-653c-47b9-b80c-fec8629069fb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.811675] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1236.811675] env[69475]: value = "task-3509184" [ 1236.811675] env[69475]: _type = "Task" [ 1236.811675] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.821676] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.102615] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.216s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.103159] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Start building networks asynchronously for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1237.106431] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.491s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.107840] env[69475]: INFO nova.compute.claims [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1237.264459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-23a6063d-92fb-4a07-94ec-4c56a9893f7c tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.922s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.322538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1237.322863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1237.323082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.341888] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064479} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.342161] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1237.343062] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd7a717-05c6-4c3c-978e-549b0a068561 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.363852] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1237.364410] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-171c4999-cbd5-46fc-b112-0b9af77f83e6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.385309] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1237.385309] env[69475]: value = "task-3509185" [ 1237.385309] env[69475]: _type = "Task" [ 1237.385309] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.393580] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509185, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.613545] env[69475]: DEBUG nova.compute.utils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1237.616996] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1237.617273] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1237.660511] env[69475]: DEBUG nova.policy [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bb93c2f0a3554be8b25cde370a4083ac', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'de2b24bdabce45a7884bdce4ed781c79', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1237.896749] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509185, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.936166] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Successfully created port: 1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1238.121471] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Start building block device mappings for instance. 
{{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1238.257697] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae03197-6931-484a-8f5a-534e83ae37af {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.267481] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8894db91-f53f-4ae5-892e-90c20fd59fd1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.298022] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ede2fe-f177-43dd-a328-60bb24d7714d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.305479] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdfe7c9-f939-4ba8-99e1-41f51d12b8a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.319070] env[69475]: DEBUG nova.compute.provider_tree [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1238.366250] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1238.366494] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1238.366680] env[69475]: DEBUG nova.network.neutron [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1238.395987] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509185, 'name': ReconfigVM_Task, 'duration_secs': 0.603425} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.396325] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Reconfigured VM instance instance-0000007d to attach disk [datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.397031] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-05da2459-bc12-4839-acfd-2a6248f7e531 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.404429] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1238.404429] env[69475]: value = "task-3509186" [ 1238.404429] env[69475]: _type = "Task" [ 1238.404429] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.412841] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509186, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.822240] env[69475]: DEBUG nova.scheduler.client.report [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1238.918788] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509186, 'name': Rename_Task, 'duration_secs': 0.143383} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.919128] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1238.919411] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-790cd029-f1b5-41bf-810b-df7a0ebee497 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.926651] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1238.926651] env[69475]: value = "task-3509187" [ 1238.926651] env[69475]: _type = "Task" [ 1238.926651] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.935245] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509187, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.104177] env[69475]: DEBUG nova.network.neutron [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1239.130268] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1239.159571] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1239.159830] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1239.159988] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1239.160189] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1239.160334] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1239.160480] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1239.160683] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1239.160866] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1239.161076] env[69475]: DEBUG nova.virt.hardware [None 
req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1239.161250] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1239.161425] env[69475]: DEBUG nova.virt.hardware [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1239.162500] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb7c802-fa3c-4c98-8e3c-edcb25b15de0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.170893] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74852fea-3a99-451f-9bca-dccdfeb3b1a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.322167] env[69475]: DEBUG nova.compute.manager [req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Received event network-vif-plugged-1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1239.322167] env[69475]: DEBUG oslo_concurrency.lockutils [req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] Acquiring lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1239.322167] env[69475]: DEBUG oslo_concurrency.lockutils [req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1239.322167] env[69475]: DEBUG oslo_concurrency.lockutils [req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.323170] env[69475]: DEBUG nova.compute.manager [req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] No waiting events found dispatching network-vif-plugged-1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1239.323531] env[69475]: WARNING nova.compute.manager 
[req-cd3bc2b5-0f89-462b-8280-5c24d0963b51 req-0ffc5ca2-f05b-4601-8ae7-2e67be288a3f service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Received unexpected event network-vif-plugged-1243d440-897a-44e6-8f1e-2fbd61a5922f for instance with vm_state building and task_state spawning. [ 1239.326733] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.220s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1239.327333] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1239.417485] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Successfully updated port: 1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1239.436992] env[69475]: DEBUG oslo_vmware.api [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509187, 'name': PowerOnVM_Task, 'duration_secs': 0.452795} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.437960] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1239.437960] env[69475]: INFO nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Took 5.81 seconds to spawn the instance on the hypervisor. 
[ 1239.437960] env[69475]: DEBUG nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1239.438722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22f65f2d-96a6-44e1-99c7-f6641f603fa8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.607481] env[69475]: DEBUG oslo_concurrency.lockutils [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1239.831994] env[69475]: DEBUG nova.compute.utils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1239.833518] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Not allocating networking since 'none' was specified. {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1239.919728] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.919883] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1239.920030] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1239.955388] env[69475]: INFO nova.compute.manager [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Took 13.24 seconds to build instance. 
[ 1240.116453] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d104fb8b-bfe6-4e07-95af-57c79a5e5794 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.123964] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f98bcf-8dec-494b-a0bc-df6b9e30d629 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.334689] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1240.453432] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1240.457044] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f6d095f3-f05b-4b87-8145-19d25f5362aa tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.749s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.548641] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.548884] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1240.649820] env[69475]: DEBUG nova.network.neutron [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.774677] env[69475]: INFO nova.compute.manager [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Rebuilding instance [ 1240.815192] env[69475]: DEBUG nova.compute.manager [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1240.816079] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1284f2f2-d5da-4063-ad68-49f8abf65092 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.052234] env[69475]: INFO nova.compute.manager [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Detaching volume 55979385-56fa-4679-83f2-a4ecdaa6c8f3 [ 1241.084619] env[69475]: INFO nova.virt.block_device [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Attempting to driver detach volume 55979385-56fa-4679-83f2-a4ecdaa6c8f3 from mountpoint /dev/sdb [ 1241.084905] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1241.085152] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701151', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'name': 'volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd63ddc35-06b3-43a2-bdd5-a91cf4047a4b', 'attached_at': '', 'detached_at': '', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'serial': '55979385-56fa-4679-83f2-a4ecdaa6c8f3'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1241.086026] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f3f404-5d43-4ebb-a169-a65b72fa102c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.107944] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-226afa4e-d610-4ad1-96bc-a07f5fe7aaac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.114697] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6f0196f-4e89-4f77-9fe1-5c02489222ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.136204] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fa0d5e7-dd1c-4c1d-a300-6319000b6ed3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.150572] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] The volume has not been displaced from its original location: [datastore1] volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3/volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1241.155868] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfiguring VM instance instance-00000074 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.156388] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1241.156672] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Instance network_info: |[{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1241.156893] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bcf5fd9c-6a86-46b4-92d9-7699e3154d68 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.169369] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:16:3b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1243d440-897a-44e6-8f1e-2fbd61a5922f', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.176396] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 
tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1241.176593] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.177154] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5feed8d2-afff-4f8b-bf3b-50721b134a9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.192193] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1241.192193] env[69475]: value = "task-3509188" [ 1241.192193] env[69475]: _type = "Task" [ 1241.192193] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.197385] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.197385] env[69475]: value = "task-3509189" [ 1241.197385] env[69475]: _type = "Task" [ 1241.197385] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.203611] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509188, 'name': ReconfigVM_Task} progress is 10%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.207754] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509189, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.257060] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d185284e-3710-475c-bf42-ab59ee9c1b7f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.276625] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b2971e5-5d9b-4a61-9ad3-c3c6e72b2dac {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.284829] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 83 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1241.343150] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Start spawning the instance on the hypervisor. 
{{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1241.349917] env[69475]: DEBUG nova.compute.manager [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Received event network-changed-1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1241.350136] env[69475]: DEBUG nova.compute.manager [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Refreshing instance network info cache due to event network-changed-1243d440-897a-44e6-8f1e-2fbd61a5922f. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1241.350362] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] Acquiring lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.350505] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] Acquired lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.350661] env[69475]: DEBUG nova.network.neutron [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Refreshing network info cache for port 1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1241.367866] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1241.368098] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1241.368262] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 
1241.368445] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1241.368612] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1241.368750] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1241.368958] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1241.369136] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1241.369311] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1241.369494] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1241.369658] env[69475]: DEBUG nova.virt.hardware [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1241.370885] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c1ae39-7574-410b-96cb-816062ac25fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.379093] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c572958d-8b22-4b8f-8648-4f267726acde {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.394767] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] 
Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1241.400290] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Creating folder: Project (39b7a0f0f92a49f3b5e3a4565f1add06). Parent ref: group-v700823. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1241.400847] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f61af72-9b25-4af8-8be7-5a0f81442a36 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.411025] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Created folder: Project (39b7a0f0f92a49f3b5e3a4565f1add06) in parent group-v700823. [ 1241.411243] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Creating folder: Instances. Parent ref: group-v701161. {{(pid=69475) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1241.411479] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-587f0089-7f9d-45c2-9f6f-9a8b258ea503 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.420892] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Created folder: Instances in parent group-v701161. [ 1241.421147] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1241.421343] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1241.421675] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92ce6e91-c35f-40f0-b5f6-b712ed1833c9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.439475] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1241.439475] env[69475]: value = "task-3509192" [ 1241.439475] env[69475]: _type = "Task" [ 1241.439475] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.447561] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509192, 'name': CreateVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.705220] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509188, 'name': ReconfigVM_Task, 'duration_secs': 0.30524} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.705943] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Reconfigured VM instance instance-00000074 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1241.713780] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5d5101f-8696-4d07-ac3b-9e69c9c2d0a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.724055] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509189, 'name': CreateVM_Task, 'duration_secs': 0.373496} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.724055] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1241.724700] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.724862] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.725199] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1241.725452] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-545ae69a-d5a5-4faf-8b10-908cfc0837ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.728097] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1241.728097] env[69475]: value = "task-3509193" [ 1241.728097] env[69475]: _type = "Task" [ 1241.728097] env[69475]: } to complete. 
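The pattern running through these entries is the one oslo.vmware uses for every vCenter operation: the driver invokes a *_Task SOAP method, gets back a task (or session-scoped) reference, and wait_for_task polls it, logging "progress is N%" until it reports "completed successfully". A minimal sketch of that flow, assuming the oslo.vmware session API referenced in these log locations; the vCenter host, credentials, retry count and poll interval below are placeholders, not values from this deployment:

    from oslo_vmware import api, vim_util

    # Placeholder connection details; substitute real vCenter credentials.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    # Look up a VM managed-object reference to operate on (first match only).
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # Kick off a vCenter task and block until it finishes; wait_for_task
    # polls the task object and raises if it ends in an error state.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)
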
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.732900] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1241.732900] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cefae2-7038-9b92-e50f-d5ca7b648fb2" [ 1241.732900] env[69475]: _type = "Task" [ 1241.732900] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.739170] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509193, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.743940] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cefae2-7038-9b92-e50f-d5ca7b648fb2, 'name': SearchDatastore_Task, 'duration_secs': 0.008524} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.744215] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1241.744467] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1241.744714] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.744857] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.745047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1241.745286] env[69475]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5e1c22f-a1d9-4ad3-ba55-adea64c2224e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.752324] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1241.752507] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1241.753260] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d5cc5f9-a1df-412e-830c-e23cdaf3ebdc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.758356] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1241.758356] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529efe5c-252b-a6f3-3e05-908d0cff8b00" [ 1241.758356] env[69475]: _type = "Task" [ 1241.758356] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.766364] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529efe5c-252b-a6f3-3e05-908d0cff8b00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.791412] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1241.791969] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc58985f-d8b3-4a5d-a5f3-a1b8c4670478 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.797687] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1241.797687] env[69475]: value = "task-3509194" [ 1241.797687] env[69475]: _type = "Task" [ 1241.797687] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.806295] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509194, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.829260] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1241.829858] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-659aad1b-2e10-408a-8254-594911ef9aab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.835911] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1241.835911] env[69475]: value = "task-3509195" [ 1241.835911] env[69475]: _type = "Task" [ 1241.835911] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.844994] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1241.951875] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509192, 'name': CreateVM_Task, 'duration_secs': 0.281656} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1241.952546] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1241.953065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1241.953309] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1241.953718] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1241.954377] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e722d168-8d57-44c0-b547-33d78dd8dc53 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.959662] env[69475]: DEBUG 
oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1241.959662] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076bc2-1e20-f50b-69d4-974b4189b568" [ 1241.959662] env[69475]: _type = "Task" [ 1241.959662] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.970604] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076bc2-1e20-f50b-69d4-974b4189b568, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.091696] env[69475]: DEBUG nova.network.neutron [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updated VIF entry in instance network info cache for port 1243d440-897a-44e6-8f1e-2fbd61a5922f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1242.092064] env[69475]: DEBUG nova.network.neutron [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1242.238726] env[69475]: DEBUG oslo_vmware.api [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509193, 'name': ReconfigVM_Task, 'duration_secs': 0.14408} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.239033] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701151', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'name': 'volume-55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'd63ddc35-06b3-43a2-bdd5-a91cf4047a4b', 'attached_at': '', 'detached_at': '', 'volume_id': '55979385-56fa-4679-83f2-a4ecdaa6c8f3', 'serial': '55979385-56fa-4679-83f2-a4ecdaa6c8f3'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1242.269302] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]529efe5c-252b-a6f3-3e05-908d0cff8b00, 'name': SearchDatastore_Task, 'duration_secs': 0.008116} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.270087] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5320f4c-9d58-4887-901a-4c2b2dafe7f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.275120] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1242.275120] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d6bce0-4d5b-e27e-3270-4bd88d56101b" [ 1242.275120] env[69475]: _type = "Task" [ 1242.275120] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.282193] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d6bce0-4d5b-e27e-3270-4bd88d56101b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.305461] env[69475]: DEBUG oslo_vmware.api [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509194, 'name': PowerOnVM_Task, 'duration_secs': 0.474172} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.305712] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1242.305898] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-d18a80d5-c1e4-4d2e-8bf2-18ec2b480d9b tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2' progress to 100 {{(pid=69475) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1242.345077] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509195, 'name': PowerOffVM_Task, 'duration_secs': 0.147406} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.345399] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1242.345550] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1242.346312] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7bb2bc1-652d-4aff-8309-13e85ec957a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.353579] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1242.353801] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-113b2d76-4be3-420c-a795-d95622e851a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.377051] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1242.377270] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1242.377471] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleting the datastore file [datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1242.377692] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bf6510a5-2256-4173-a1d1-66aec1d23933 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.394468] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1242.394468] env[69475]: value = "task-3509197" [ 1242.394468] env[69475]: _type = "Task" [ 1242.394468] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.402768] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509197, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.470232] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52076bc2-1e20-f50b-69d4-974b4189b568, 'name': SearchDatastore_Task, 'duration_secs': 0.01646} completed successfully. 
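The teardown sequence above (PowerOffVM_Task, UnregisterVM, then FileManager.DeleteDatastoreFile_Task on the instance directory) is how the vmwareapi driver removes an instance and its backing files. A rough sketch of the same calls through the session API, assuming the session from the earlier sketch and treating the looked-up references and datastore path as placeholders:

    from oslo_vmware import vim_util

    # Assumes `session` from the earlier sketch; look up placeholder refs.
    vm_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1).objects[0].obj
    dc_ref = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'Datacenter', 1).objects[0].obj
    file_manager = session.vim.service_content.fileManager

    # Power off and unregister the VM (UnregisterVM is not a task).
    session.wait_for_task(
        session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref))
    session.invoke_api(session.vim, 'UnregisterVM', vm_ref)

    # Then remove the instance directory from the datastore.
    delete_task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', file_manager,
        name='[datastore1] cdd3f5d9-c980-41ff-92b4-14948ee00631',
        datacenter=dc_ref)
    session.wait_for_task(delete_task)
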
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.470654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.470965] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1242.471240] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1242.594786] env[69475]: DEBUG oslo_concurrency.lockutils [req-7a717984-be82-478c-b500-ee9c91fd0b4c req-d5248ccf-b6f7-44f4-bc5e-6df1fab586f4 service nova] Releasing lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.783143] env[69475]: DEBUG nova.objects.instance [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1242.790330] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52d6bce0-4d5b-e27e-3270-4bd88d56101b, 'name': SearchDatastore_Task, 'duration_secs': 0.030847} completed successfully. 
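The "Acquiring lock" / "Acquired lock" / "Releasing lock" lines on [datastore1] devstack-image-cache_base/... come from oslo.concurrency's lockutils (the lockutils.py line numbers in the braces), which Nova uses here to serialise work on a shared image-cache entry while several instances spawn from the same image. A minimal sketch of that locking pattern, assuming the public lockutils API; the lock names and bodies below are illustrative only:

    from oslo_concurrency import lockutils

    IMAGE_LOCK = ('[datastore1] devstack-image-cache_base/'
                  'afa9d32c-9f39-44fb-bf3b-50d35842a59f')

    # Context-manager form: held for the body, released on exit, producing
    # paired "Acquired"/"Releasing" debug lines like the ones above.
    with lockutils.lock(IMAGE_LOCK):
        pass  # e.g. check the cache and copy the base VMDK only if missing

    # Decorator form, as used around per-instance operations such as
    # do_stop_instance in the compute manager entries above.
    @lockutils.synchronized('d63ddc35-06b3-43a2-bdd5-a91cf4047a4b')
    def stop_instance():
        pass  # body runs only while the per-instance lock is held
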
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.791029] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1242.791399] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] edec6d3e-1881-4d6a-9e0f-c9a177e334ad/edec6d3e-1881-4d6a-9e0f-c9a177e334ad.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1242.791793] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1242.792078] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1242.792368] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a6564c58-36cf-435e-b67b-a7582c57a0a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.794996] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-546eb921-84f5-4525-a9d3-52192fc01c3c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.803015] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1242.803015] env[69475]: value = "task-3509198" [ 1242.803015] env[69475]: _type = "Task" [ 1242.803015] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.808545] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1242.808796] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1242.810148] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56371795-e8a0-43b1-b1d4-0296638a4a9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1242.821892] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509198, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.827842] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1242.827842] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259fbc5-2d47-d30d-895d-09d3675296fa" [ 1242.827842] env[69475]: _type = "Task" [ 1242.827842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1242.837022] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259fbc5-2d47-d30d-895d-09d3675296fa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.904715] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509197, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101484} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1242.906025] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1242.906025] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1242.906025] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1243.313684] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509198, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.450987} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.313948] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] edec6d3e-1881-4d6a-9e0f-c9a177e334ad/edec6d3e-1881-4d6a-9e0f-c9a177e334ad.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1243.314183] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1243.314579] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-422f5134-e18d-4f6a-b9a2-3fa42945f32d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.321982] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1243.321982] env[69475]: value = "task-3509199" [ 1243.321982] env[69475]: _type = "Task" [ 1243.321982] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.346056] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509199, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.346056] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5259fbc5-2d47-d30d-895d-09d3675296fa, 'name': SearchDatastore_Task, 'duration_secs': 0.007988} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.346483] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc43d0cd-b368-418e-a975-08341c9861dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.352497] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1243.352497] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5225517e-9dd6-e544-b10c-a2002f22bfc1" [ 1243.352497] env[69475]: _type = "Task" [ 1243.352497] env[69475]: } to complete. 
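The copy-then-extend sequence above (CopyVirtualDisk_Task from the image cache into the instance directory, then ExtendVirtualDisk_Task to grow the root disk to the flavor size) is the core of spawning from a cached sparse image. A rough sketch of those two calls through the session API, assuming the standard VirtualDiskManager parameters and the `session`/`dc_ref` set up in the earlier sketches; all paths and sizes below are placeholders:

    # Assumes `session` and `dc_ref` from the earlier sketches.
    disk_mgr = session.vim.service_content.virtualDiskManager
    cache_vmdk = ('[datastore1] devstack-image-cache_base/'
                  'afa9d32c-9f39-44fb-bf3b-50d35842a59f/'
                  'afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk')
    root_vmdk = '[datastore1] <instance-uuid>/<instance-uuid>.vmdk'

    # Copy the cached base image to the instance's own VMDK.
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=cache_vmdk, sourceDatacenter=dc_ref,
        destName=root_vmdk, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # Grow the copied root disk to the flavor's size (KiB), lazily zeroed.
    extend_task = session.invoke_api(
        session.vim, 'ExtendVirtualDisk_Task', disk_mgr,
        name=root_vmdk, datacenter=dc_ref,
        newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)
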
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.367017] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5225517e-9dd6-e544-b10c-a2002f22bfc1, 'name': SearchDatastore_Task, 'duration_secs': 0.010104} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.367017] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1243.367017] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1243.367017] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34c3ca1f-d3ab-4a27-8488-f3398dd08d7c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.373694] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1243.373694] env[69475]: value = "task-3509200" [ 1243.373694] env[69475]: _type = "Task" [ 1243.373694] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.731181] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.795618] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9fe6468f-0f34-4733-95b8-20e3d31c9d1b tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.247s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1243.796872] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.066s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.797034] env[69475]: DEBUG nova.compute.manager [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1243.798198] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9a54bd-2e6d-40cf-af47-c8d74edb3197 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.805492] env[69475]: DEBUG nova.compute.manager [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1243.806085] env[69475]: DEBUG nova.objects.instance [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1243.831674] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509199, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088271} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.831946] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1243.832661] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc522a3-6f29-4db7-bd5d-9f75e4f9d847 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.854965] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] edec6d3e-1881-4d6a-9e0f-c9a177e334ad/edec6d3e-1881-4d6a-9e0f-c9a177e334ad.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1243.855263] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c41cdb48-7796-433b-ad0b-329893d095dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.883343] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1243.883343] env[69475]: value = "task-3509201" [ 1243.883343] env[69475]: _type = "Task" [ 1243.883343] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.889465] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44117} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.889771] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1243.889938] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1243.890574] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3569cc3d-bc30-4623-859f-afdeaf9c9189 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.898216] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509201, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.903808] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1243.903808] env[69475]: value = "task-3509202" [ 1243.903808] env[69475]: _type = "Task" [ 1243.903808] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.916662] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509202, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.943586] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1243.943829] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1243.943989] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1243.944189] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1243.944339] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1243.944489] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1243.944697] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1243.944857] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1243.945040] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 
tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1243.945209] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1243.945382] env[69475]: DEBUG nova.virt.hardware [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1243.946256] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-928b2841-25ce-4af5-80fa-2e537b9f201c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.956622] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114c5466-8a83-4f04-9723-bb08353969ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.971200] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1243.976840] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1243.977107] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1243.977390] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-79862662-c288-4471-8d8a-92c3814575cd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.001309] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1244.001309] env[69475]: value = "task-3509203" [ 1244.001309] env[69475]: _type = "Task" [ 1244.001309] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.012030] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509203, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.395705] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509201, 'name': ReconfigVM_Task, 'duration_secs': 0.338074} completed successfully. 
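The nova.virt.hardware records above walk through CPU topology selection for the 1-vCPU m1.nano flavor: no flavor or image limits are set, so the maxima default to 65536 per dimension, and the only factorization of 1 vCPU is sockets=1, cores=1, threads=1. A small sketch of that enumeration step, assuming the simple product-equals-vcpus rule visible in the log rather than the full nova implementation:

from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus and
    which respect the per-dimension maxima -- "Build topologies for N vcpu(s)"."""
    found = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets > max_sockets or cores > max_cores or threads > max_threads:
            continue
        found.append((sockets, cores, threads))
    return found


print(possible_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"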
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.396637] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Reconfigured VM instance instance-0000007e to attach disk [datastore1] edec6d3e-1881-4d6a-9e0f-c9a177e334ad/edec6d3e-1881-4d6a-9e0f-c9a177e334ad.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.396829] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ba63cdfc-a061-4e32-bd4e-e6130e7d0a4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.404606] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1244.404606] env[69475]: value = "task-3509204" [ 1244.404606] env[69475]: _type = "Task" [ 1244.404606] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.420241] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509204, 'name': Rename_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.423346] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509202, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105757} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.424124] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1244.424497] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea85415-f7f5-468d-b0cd-dd5a5bb07e95 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.446264] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1244.446264] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88457a49-06f1-4e20-ae6a-d24782295ca0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.467820] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1244.467820] env[69475]: value = "task-3509205" [ 1244.467820] env[69475]: _type = "Task" [ 1244.467820] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.477125] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509205, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.502130] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.502454] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.502634] env[69475]: DEBUG nova.compute.manager [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Going to confirm migration 10 {{(pid=69475) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1244.516775] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509203, 'name': CreateVM_Task, 'duration_secs': 0.360387} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.517161] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1244.517633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.517830] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.518189] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1244.518492] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd73033e-01ba-4cc2-924a-5a93722529cb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.525138] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1244.525138] 
env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525f67fb-0a9e-815e-fed6-8a7875d153ec" [ 1244.525138] env[69475]: _type = "Task" [ 1244.525138] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.535130] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525f67fb-0a9e-815e-fed6-8a7875d153ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.814072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1244.814388] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5cb48a2c-43a3-49a9-b4ec-03994b048ae7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.822545] env[69475]: DEBUG oslo_vmware.api [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1244.822545] env[69475]: value = "task-3509206" [ 1244.822545] env[69475]: _type = "Task" [ 1244.822545] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.832288] env[69475]: DEBUG oslo_vmware.api [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509206, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.918321] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509204, 'name': Rename_Task, 'duration_secs': 0.147077} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.918637] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1244.918794] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc294cbf-e5f6-405e-8e89-550029f3ec41 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.925718] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1244.925718] env[69475]: value = "task-3509207" [ 1244.925718] env[69475]: _type = "Task" [ 1244.925718] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1244.933309] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509207, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1244.978795] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509205, 'name': ReconfigVM_Task, 'duration_secs': 0.289681} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1244.979105] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1244.979911] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3fd09b17-2954-4ddd-8f00-8c92b3271da1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1244.992286] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1244.992286] env[69475]: value = "task-3509208" [ 1244.992286] env[69475]: _type = "Task" [ 1244.992286] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.001309] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509208, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.035444] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]525f67fb-0a9e-815e-fed6-8a7875d153ec, 'name': SearchDatastore_Task, 'duration_secs': 0.023522} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.035747] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1245.035981] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1245.036233] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.036434] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.036590] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1245.036859] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-525cd58e-9a45-4c98-b40e-309acb38c5c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.050086] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1245.050316] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1245.051128] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-123495f1-3ba3-4124-969e-1db6ea48ce52 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.058610] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1245.058610] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a6d42b-3ec5-d865-5c2b-e4deb582785f" [ 1245.058610] env[69475]: _type = "Task" [ 1245.058610] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.067269] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a6d42b-3ec5-d865-5c2b-e4deb582785f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.079065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1245.079065] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquired lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1245.079065] env[69475]: DEBUG nova.network.neutron [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.079065] env[69475]: DEBUG nova.objects.instance [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'info_cache' on Instance uuid 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1245.332988] env[69475]: DEBUG oslo_vmware.api [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509206, 'name': PowerOffVM_Task, 'duration_secs': 0.19551} completed successfully. 
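The lock, MakeDirectory and SearchDatastore_Task records above (with the CopyVirtualDisk_Task that follows a few records later) trace the image-cache path: serialize on the cached VMDK, make sure the cache folder exists, check whether the cached disk is already on the datastore, then copy it into the new instance's folder. A local-filesystem sketch of that check-then-copy flow; the paths, the threading lock, and the helper names are illustrative, not the datastore API:

import shutil
import threading
from pathlib import Path

_cache_locks: dict[str, threading.Lock] = {}


def _lock_for(key: str) -> threading.Lock:
    """One lock per cached image, mirroring the per-image lock names in the log."""
    return _cache_locks.setdefault(key, threading.Lock())


def fetch_from_cache(cache_dir: Path, image_id: str, instance_dir: Path) -> Path:
    """Copy <image_id>.vmdk from the cache into the instance directory, creating
    the cache directory first if it is missing ("Creating directory with path ...")."""
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    with _lock_for(str(cached)):
        cache_dir.mkdir(parents=True, exist_ok=True)
        if not cached.exists():
            raise FileNotFoundError(f"{cached} is not cached; it would be fetched first")
        instance_dir.mkdir(parents=True, exist_ok=True)
        target = instance_dir / f"{instance_dir.name}.vmdk"
        shutil.copyfile(cached, target)  # analogue of CopyVirtualDisk_Task
    return target

Holding the per-image lock across the existence check and the copy is what the log shows: the lock is acquired before SearchDatastore_Task and released only once the lookup has answered, so concurrent builds of the same image do not race on the cache entry.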
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.333411] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1245.333505] env[69475]: DEBUG nova.compute.manager [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.334241] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9176e290-58db-44af-86d3-7a39766e2c01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.436330] env[69475]: DEBUG oslo_vmware.api [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509207, 'name': PowerOnVM_Task, 'duration_secs': 0.501925} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.436779] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1245.436823] env[69475]: INFO nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Took 6.31 seconds to spawn the instance on the hypervisor. [ 1245.436968] env[69475]: DEBUG nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1245.437792] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1969fc-73ca-4922-949d-b4862d2e2644 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.502040] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509208, 'name': Rename_Task, 'duration_secs': 0.167841} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.502322] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1245.502564] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-76c31d42-03ea-4d65-be8e-63a727968fce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.511383] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1245.511383] env[69475]: value = "task-3509209" [ 1245.511383] env[69475]: _type = "Task" [ 1245.511383] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.521200] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509209, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.569319] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52a6d42b-3ec5-d865-5c2b-e4deb582785f, 'name': SearchDatastore_Task, 'duration_secs': 0.011225} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1245.570110] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81da7c02-fc90-4ccd-ab83-c2280186e591 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.575733] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1245.575733] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d3b1-0fd6-d99b-74c2-ecab4cf4bc9b" [ 1245.575733] env[69475]: _type = "Task" [ 1245.575733] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1245.585647] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d3b1-0fd6-d99b-74c2-ecab4cf4bc9b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1245.847688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cfdcee81-20b7-4020-8efe-0eab5f8414f4 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.957061] env[69475]: INFO nova.compute.manager [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Took 18.68 seconds to build instance. [ 1246.022905] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509209, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.088684] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5295d3b1-0fd6-d99b-74c2-ecab4cf4bc9b, 'name': SearchDatastore_Task, 'duration_secs': 0.042247} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.088684] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.088902] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1246.089906] env[69475]: DEBUG nova.objects.instance [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.091215] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1e1b6e7-1276-41a3-b546-7febcc018df6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.101261] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1246.101261] env[69475]: value = "task-3509210" [ 1246.101261] env[69475]: _type = "Task" [ 
1246.101261] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.111174] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.373135] env[69475]: DEBUG nova.network.neutron [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [{"id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "address": "fa:16:3e:b0:88:42", "network": {"id": "f13498a3-434e-4d1b-a9e8-f7af6074b41b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-661590424-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.247", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e87670cfd2b848af98507a5ebf9fab51", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee9ce73d-4ee8-4b28-b7d3-3a5735039627", "external-id": "cl2-zone-465", "segmentation_id": 465, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap789f3dcb-d9", "ovs_interfaceid": "789f3dcb-d9c8-495b-b66c-896fb31e0e63", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1246.459570] env[69475]: DEBUG oslo_concurrency.lockutils [None req-9e5d4487-95e7-42b4-883c-3542618b4b5c tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.195s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.526068] env[69475]: DEBUG oslo_vmware.api [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509209, 'name': PowerOnVM_Task, 'duration_secs': 0.784354} completed successfully. 
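The instance_info_cache entry logged just above is a nested VIF structure: each VIF carries a port id and MAC plus subnets whose fixed IPs can in turn carry floating IPs. A short sketch of walking one of those structures to list its addresses; the sample dict is trimmed from the 319fc3f2 cache entry above, and the addresses() helper is written for this note:

vif = {
    "id": "789f3dcb-d9c8-495b-b66c-896fb31e0e63",
    "address": "fa:16:3e:b0:88:42",
    "network": {
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{
                "address": "192.168.128.14",
                "type": "fixed",
                "floating_ips": [{"address": "10.180.180.247", "type": "floating"}],
            }],
        }],
    },
}


def addresses(vif):
    """Yield (kind, address) pairs for every fixed and floating IP on a VIF."""
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            yield ip["type"], ip["address"]
            for fip in ip.get("floating_ips", []):
                yield fip["type"], fip["address"]


print(list(addresses(vif)))
# [('fixed', '192.168.128.14'), ('floating', '10.180.180.247')]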
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.526398] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1246.526620] env[69475]: INFO nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Took 5.18 seconds to spawn the instance on the hypervisor. [ 1246.526799] env[69475]: DEBUG nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1246.527728] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ece3630-8461-477a-b96a-0bd1c857b7a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.597023] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1246.597236] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1246.597443] env[69475]: DEBUG nova.network.neutron [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1246.597643] env[69475]: DEBUG nova.objects.instance [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'info_cache' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1246.612606] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489629} completed successfully. 
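Throughout this section oslo.concurrency logs the same three-step lock lifecycle: "Acquiring lock ... by ...", "acquired ... waited Ns", and "released ... held Ns" (2.051s for the stop_instance lock and 20.195s for the build_and_run_instance lock just above). A minimal sketch of a context manager that reproduces that waited/held bookkeeping with plain threading locks; it is not the oslo_concurrency implementation, and the lock and owner names in the example are only borrowed from the log for illustration:

import threading
import time
from contextlib import contextmanager

_locks = {}


@contextmanager
def timed_lock(name, owner):
    """Acquire a named lock and report waited/held durations like the records above."""
    lock = _locks.setdefault(name, threading.Lock())
    print(f'Acquiring lock "{name}" by "{owner}"')
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    print(f'Lock "{name}" acquired by "{owner}" :: waited {acquired - start:.3f}s')
    try:
        yield
    finally:
        held = time.monotonic() - acquired
        lock.release()
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


with timed_lock("compute_resources", "drop_move_claim_at_source"):
    time.sleep(0.01)  # work done while the lock is held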
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1246.613380] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1246.613603] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1246.613842] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f7f31708-a82b-42da-86b5-6fc6fe1b3776 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.623532] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1246.623532] env[69475]: value = "task-3509211" [ 1246.623532] env[69475]: _type = "Task" [ 1246.623532] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.633604] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509211, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.878119] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Releasing lock "refresh_cache-319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.878419] env[69475]: DEBUG nova.objects.instance [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'migration_context' on Instance uuid 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1247.003553] env[69475]: DEBUG nova.compute.manager [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Received event network-changed-1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1247.003747] env[69475]: DEBUG nova.compute.manager [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Refreshing instance network info cache due to event network-changed-1243d440-897a-44e6-8f1e-2fbd61a5922f. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1247.003961] env[69475]: DEBUG oslo_concurrency.lockutils [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] Acquiring lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1247.004126] env[69475]: DEBUG oslo_concurrency.lockutils [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] Acquired lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1247.004310] env[69475]: DEBUG nova.network.neutron [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Refreshing network info cache for port 1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1247.044309] env[69475]: INFO nova.compute.manager [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Took 16.44 seconds to build instance. [ 1247.101130] env[69475]: DEBUG nova.objects.base [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1247.134201] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509211, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070603} completed successfully. 
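The records above show the external-event path: Neutron reports network-changed for port 1243d440-897a-44e6-8f1e-2fbd61a5922f, the compute manager takes the instance's refresh_cache lock, and the matching VIF entry in the cached network info is then refreshed (the "Updated VIF entry" record appears a few lines further down). A toy sketch of that handler, assuming an in-memory cache keyed by instance UUID holding a list of VIF dicts; the cache layout and the fetch_port callable are stand-ins for the real Neutron lookup:

# Toy in-memory cache: instance UUID -> list of VIF dicts (with a stale 'active' flag).
cache = {
    "edec6d3e-1881-4d6a-9e0f-c9a177e334ad": [
        {"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "active": False},
    ],
}


def handle_network_changed(instance_uuid, port_id, fetch_port):
    """Refresh the cached VIF entry matching port_id, as in
    "Updated VIF entry in instance network info cache for port ..."."""
    vifs = cache.get(instance_uuid, [])
    for i, vif in enumerate(vifs):
        if vif["id"] == port_id:
            vifs[i] = fetch_port(port_id)  # replace the stale entry with fresh port data
            return True
    return False  # unknown port: a full cache rebuild would be needed instead


handle_network_changed(
    "edec6d3e-1881-4d6a-9e0f-c9a177e334ad",
    "1243d440-897a-44e6-8f1e-2fbd61a5922f",
    fetch_port=lambda pid: {"id": pid, "active": True},
)
print(cache["edec6d3e-1881-4d6a-9e0f-c9a177e334ad"])
# [{'id': '1243d440-897a-44e6-8f1e-2fbd61a5922f', 'active': True}]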
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.134426] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1247.135194] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d7b693-de74-46df-8db5-ef4839a1ecc0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.155581] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Reconfiguring VM instance instance-0000007d to attach disk [datastore2] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1247.155822] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca11953c-3179-4a79-afda-ecf8255d7b69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.176048] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1247.176048] env[69475]: value = "task-3509212" [ 1247.176048] env[69475]: _type = "Task" [ 1247.176048] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.187486] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509212, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.381319] env[69475]: DEBUG nova.objects.base [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Object Instance<319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2> lazy-loaded attributes: info_cache,migration_context {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1247.382339] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d0c4d1b-3c72-4ca9-b498-4ceae85d9745 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.388071] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.388262] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1247.403798] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-add5b9e1-6c74-444e-9e59-44219178b560 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.411075] env[69475]: DEBUG oslo_vmware.api [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1247.411075] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd915-c435-8160-f9d4-6cccfb5f6cae" [ 1247.411075] env[69475]: _type = "Task" [ 1247.411075] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.421662] env[69475]: DEBUG oslo_vmware.api [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd915-c435-8160-f9d4-6cccfb5f6cae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.545454] env[69475]: DEBUG oslo_concurrency.lockutils [None req-095bc96f-fac9-49e9-9527-f283444d84c4 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.951s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.688175] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509212, 'name': ReconfigVM_Task} progress is 14%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.753024] env[69475]: DEBUG nova.network.neutron [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updated VIF entry in instance network info cache for port 1243d440-897a-44e6-8f1e-2fbd61a5922f. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1247.753130] env[69475]: DEBUG nova.network.neutron [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.819955] env[69475]: DEBUG nova.network.neutron [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [{"id": "face26ac-c45b-4932-b32e-bd2d172da60d", "address": "fa:16:3e:83:66:4e", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, 
"meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.176", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapface26ac-c4", "ovs_interfaceid": "face26ac-c45b-4932-b32e-bd2d172da60d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1247.905015] env[69475]: DEBUG nova.compute.utils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1247.922513] env[69475]: DEBUG oslo_vmware.api [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52cdd915-c435-8160-f9d4-6cccfb5f6cae, 'name': SearchDatastore_Task, 'duration_secs': 0.016573} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.922880] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1247.923198] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1248.189618] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509212, 'name': ReconfigVM_Task, 'duration_secs': 0.790982} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.189863] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Reconfigured VM instance instance-0000007d to attach disk [datastore2] cdd3f5d9-c980-41ff-92b4-14948ee00631/cdd3f5d9-c980-41ff-92b4-14948ee00631.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1248.190522] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-26483855-0544-4f20-a572-8f130f7486b1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.198149] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1248.198149] env[69475]: value = "task-3509213" [ 1248.198149] env[69475]: _type = "Task" [ 1248.198149] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.206579] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509213, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.256507] env[69475]: DEBUG oslo_concurrency.lockutils [req-a75d45df-5cb7-482c-a33c-6e174739b18c req-36cadaa3-187f-49de-b352-7f0850760b4d service nova] Releasing lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.322568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "refresh_cache-d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1248.356956] env[69475]: INFO nova.compute.manager [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Rebuilding instance [ 1248.398106] env[69475]: DEBUG nova.compute.manager [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1248.399352] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8db39a81-c692-4caf-9bae-c42a512101d2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.408945] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by 
"nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.020s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1248.554073] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a2ff89-f64e-4100-beac-83202b2f97e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.564398] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfad36a2-98f6-4ebf-837d-45c1ee566bd1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.600227] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d65623a-1492-4923-88e0-051310952dbb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.608723] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a84854a-bbd4-4146-8165-08e7f1d5dabd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.623139] env[69475]: DEBUG nova.compute.provider_tree [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1248.708236] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509213, 'name': Rename_Task, 'duration_secs': 0.141311} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.708449] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1248.708728] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-efd5cbbd-50ec-4ed1-a3cc-84357fd00b02 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.715773] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1248.715773] env[69475]: value = "task-3509214" [ 1248.715773] env[69475]: _type = "Task" [ 1248.715773] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.725551] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509214, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.125723] env[69475]: DEBUG nova.scheduler.client.report [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1249.227455] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509214, 'name': PowerOnVM_Task} progress is 89%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.330929] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1249.330929] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-83a294e7-b8df-454d-9111-acd5d37df25c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.340315] env[69475]: DEBUG oslo_vmware.api [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1249.340315] env[69475]: value = "task-3509215" [ 1249.340315] env[69475]: _type = "Task" [ 1249.340315] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.349811] env[69475]: DEBUG oslo_vmware.api [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509215, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.413998] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1249.414374] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e6a7a2d-dd75-4026-9cdb-a78f594932c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.422734] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1249.422734] env[69475]: value = "task-3509216" [ 1249.422734] env[69475]: _type = "Task" [ 1249.422734] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.435280] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509216, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.483335] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1249.483567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1249.483782] env[69475]: INFO nova.compute.manager [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Attaching volume 962264e0-88c9-4fe4-9e1c-b9215dfde6ed to /dev/sdb [ 1249.518172] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc95f7e-c778-439c-a710-107b003267f2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.526191] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31617daa-559c-44fa-afd5-faa281095cfa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.541250] env[69475]: DEBUG nova.virt.block_device [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: 
da3eff39-b80b-4574-9b07-df6f679a9f38] Updating existing volume attachment record: 3e49ee43-221c-420f-9dfd-f2b1ff0fcbdd {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1249.727444] env[69475]: DEBUG oslo_vmware.api [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509214, 'name': PowerOnVM_Task, 'duration_secs': 0.736551} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.727696] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1249.727882] env[69475]: DEBUG nova.compute.manager [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.728662] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363971f7-9798-4ba8-8c48-1d1fc55fcde8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.850792] env[69475]: DEBUG oslo_vmware.api [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509215, 'name': PowerOnVM_Task, 'duration_secs': 0.482113} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.851040] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1249.851203] env[69475]: DEBUG nova.compute.manager [None req-4743678f-ef8f-4b56-9c44-cd770daa3ad1 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1249.851961] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db1d180-2e0e-43f2-94b6-d25445d569fe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.933379] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509216, 'name': PowerOffVM_Task, 'duration_secs': 0.177417} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.933674] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1249.934423] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1249.935242] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-601a9eb4-7c2f-49aa-aab1-5dcaf98f5933 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.943046] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1249.943294] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d51f5b7b-aafa-4b6d-b9a3-1c174e5d4d51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.973633] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1249.973847] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1249.974041] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Deleting the datastore file [datastore1] 2255e878-8890-46cb-a0b2-863702743691 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.974353] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8438a487-3632-4ecc-87b0-ba411c324dce {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.981813] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1249.981813] env[69475]: value = "task-3509219" [ 1249.981813] env[69475]: _type = "Task" [ 1249.981813] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.990106] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509219, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.136837] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.213s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.244290] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.244567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.244791] env[69475]: DEBUG nova.objects.instance [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1250.492426] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509219, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378587} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.492617] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1250.492871] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1250.493098] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1250.698325] env[69475]: INFO nova.scheduler.client.report [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocation for migration 7ad5e6f9-82b2-44d2-8fef-87f39d7f662d [ 1250.797088] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.797507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.797507] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "cdd3f5d9-c980-41ff-92b4-14948ee00631-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.797728] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.798076] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.801094] env[69475]: INFO nova.compute.manager [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Terminating instance [ 1251.036092] env[69475]: INFO nova.compute.manager [None req-5cbf574a-f990-4cff-b353-9618963f1a14 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Get console output [ 1251.036531] env[69475]: WARNING nova.virt.vmwareapi.driver [None req-5cbf574a-f990-4cff-b353-9618963f1a14 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] The console log is missing. Check your VSPC configuration [ 1251.204178] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4665d5ea-59c2-4649-adf2-d682af581c8a tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.702s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.252194] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3dfd058c-c089-4294-89de-aa6de46d0b37 tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.007s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.307060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "refresh_cache-cdd3f5d9-c980-41ff-92b4-14948ee00631" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1251.307060] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "refresh_cache-cdd3f5d9-c980-41ff-92b4-14948ee00631" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.307060] env[69475]: DEBUG nova.network.neutron [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.576617] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1251.576881] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1251.577058] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1251.577246] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1251.577408] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1251.577609] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1251.577831] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1251.577993] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1251.578182] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1251.578347] env[69475]: DEBUG nova.virt.hardware [None 
req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1251.578523] env[69475]: DEBUG nova.virt.hardware [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1251.579415] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8beaf58e-44e7-4ae9-8769-c9a2943b6a69 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.587617] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f213198-9ce8-4139-b872-a4f57206cc09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.601131] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Instance VIF info [] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1251.606756] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1251.606978] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1251.607190] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a7c4018-8d9f-4ca1-ba4d-82fb2fc19cbc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.623430] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1251.623430] env[69475]: value = "task-3509220" [ 1251.623430] env[69475]: _type = "Task" [ 1251.623430] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1251.632055] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509220, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1251.828535] env[69475]: DEBUG nova.network.neutron [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1251.880726] env[69475]: DEBUG nova.network.neutron [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.134689] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509220, 'name': CreateVM_Task, 'duration_secs': 0.335647} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.134864] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1252.135326] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.135506] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.135858] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1252.136160] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e8ea98c-df81-4fdc-9083-15403143bce0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.141276] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1252.141276] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5224e6f0-c312-4fd9-b329-7e3f2d7c7ab3" [ 1252.141276] env[69475]: _type = "Task" [ 1252.141276] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.150100] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5224e6f0-c312-4fd9-b329-7e3f2d7c7ab3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.383574] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "refresh_cache-cdd3f5d9-c980-41ff-92b4-14948ee00631" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.384100] env[69475]: DEBUG nova.compute.manager [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1252.384317] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1252.385215] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02621b2e-7977-48ae-bda0-e9714c88f0e1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.393597] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1252.393845] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d099de02-1834-4a92-af5f-2f85cd810777 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.400636] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1252.400636] env[69475]: value = "task-3509222" [ 1252.400636] env[69475]: _type = "Task" [ 1252.400636] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.410241] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509222, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.652219] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5224e6f0-c312-4fd9-b329-7e3f2d7c7ab3, 'name': SearchDatastore_Task, 'duration_secs': 0.016564} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.652831] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.652934] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1252.653124] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.653270] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.653445] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1252.653729] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef985ba9-2e9b-4eb7-83fc-ec1b5b7a6bf8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.663552] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1252.663741] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1252.664462] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6e10038-346c-4eb3-a977-df9391b289d6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.670078] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1252.670078] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214a789-a529-177d-d684-b455eccce25e" [ 1252.670078] env[69475]: _type = "Task" [ 1252.670078] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.678542] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214a789-a529-177d-d684-b455eccce25e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1252.911791] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509222, 'name': PowerOffVM_Task, 'duration_secs': 0.287832} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1252.912176] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1252.912232] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1252.912453] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9fb6bd2d-522c-47b9-ade7-29a319cdeea7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.942864] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1252.943124] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1252.943451] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba 
tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleting the datastore file [datastore2] cdd3f5d9-c980-41ff-92b4-14948ee00631 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1252.943760] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-825b9c9d-85b4-4331-b5cf-e8e9f75b23a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.952452] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1252.952452] env[69475]: value = "task-3509224" [ 1252.952452] env[69475]: _type = "Task" [ 1252.952452] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.961311] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.180523] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5214a789-a529-177d-d684-b455eccce25e, 'name': SearchDatastore_Task, 'duration_secs': 0.014337} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.181325] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a29c791a-d27f-4789-8e9f-e69c2e6c7a6d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.187224] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1253.187224] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247d702-2255-3cae-5054-e31f1463eb4e" [ 1253.187224] env[69475]: _type = "Task" [ 1253.187224] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.195508] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247d702-2255-3cae-5054-e31f1463eb4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.461945] env[69475]: DEBUG oslo_vmware.api [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191344} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.462225] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1253.462411] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1253.462624] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1253.462799] env[69475]: INFO nova.compute.manager [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1253.463047] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1253.463245] env[69475]: DEBUG nova.compute.manager [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1253.463340] env[69475]: DEBUG nova.network.neutron [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1253.478925] env[69475]: DEBUG nova.network.neutron [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1253.698302] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5247d702-2255-3cae-5054-e31f1463eb4e, 'name': SearchDatastore_Task, 'duration_secs': 0.022434} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1253.698486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Releasing lock "[datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.698750] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1253.698996] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f9b3e6db-209c-4b64-a994-b6c36a26b678 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.706971] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1253.706971] env[69475]: value = "task-3509225" [ 1253.706971] env[69475]: _type = "Task" [ 1253.706971] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.714998] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509225, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.981542] env[69475]: DEBUG nova.network.neutron [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1254.093013] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1254.093320] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701165', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'name': 'volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da3eff39-b80b-4574-9b07-df6f679a9f38', 'attached_at': '', 'detached_at': '', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'serial': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1254.094346] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca479631-366c-4957-975a-5ebdb7444a34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.113627] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5f876a-2f79-44c0-b5e0-65ca8b8ef79a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.145515] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed/volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1254.146767] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6947e85f-aa29-4c0a-9646-764bca1a2537 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.168548] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1254.168548] env[69475]: value = "task-3509226" [ 1254.168548] env[69475]: _type = "Task" [ 1254.168548] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.177659] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509226, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.218228] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509225, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.483960] env[69475]: INFO nova.compute.manager [-] [instance: cdd3f5d9-c980-41ff-92b4-14948ee00631] Took 1.02 seconds to deallocate network for instance. [ 1254.621082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.621372] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.621552] env[69475]: INFO nova.compute.manager [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Shelving [ 1254.678522] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509226, 'name': ReconfigVM_Task, 'duration_secs': 0.430479} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.678849] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfigured VM instance instance-00000079 to attach disk [datastore2] volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed/volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1254.683900] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ab7eb32-ddb0-4b94-9d47-d00e61a60150 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.702098] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1254.702098] env[69475]: value = "task-3509227" [ 1254.702098] env[69475]: _type = "Task" [ 1254.702098] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.711211] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509227, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.725190] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509225, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.570061} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.725416] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1254.725635] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1254.725887] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e66c4f19-de28-4939-a699-df7fa0ce46ee {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.732792] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1254.732792] env[69475]: value = "task-3509228" [ 1254.732792] env[69475]: _type = "Task" [ 1254.732792] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.742183] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509228, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.991136] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.991544] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.991666] env[69475]: DEBUG nova.objects.instance [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lazy-loading 'resources' on Instance uuid cdd3f5d9-c980-41ff-92b4-14948ee00631 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1255.212013] env[69475]: DEBUG oslo_vmware.api [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509227, 'name': ReconfigVM_Task, 'duration_secs': 0.144205} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.213528] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701165', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'name': 'volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da3eff39-b80b-4574-9b07-df6f679a9f38', 'attached_at': '', 'detached_at': '', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'serial': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1255.242045] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509228, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.058117} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.242273] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1255.243087] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d49c0c97-1b6e-4d6f-89b3-ee4bc133e459 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.262925] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1255.263416] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-633366c5-ab3b-402c-8870-ad5fbd1a1fcd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.284075] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1255.284075] env[69475]: value = "task-3509229" [ 1255.284075] env[69475]: _type = "Task" [ 1255.284075] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.625265] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df5db57-5335-4281-8a77-36f1662d0a55 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.630774] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1255.632762] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b8ff5772-876b-4dd0-9840-52df28cf6002 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.635027] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7a7125e-b152-477b-a28c-3cfdcca8bce1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.668230] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cce4ddb-b616-45a2-b81b-b2c701503120 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.671105] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1255.671105] env[69475]: value = "task-3509230" [ 1255.671105] env[69475]: _type = "Task" [ 1255.671105] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.678119] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ee01c9-c6ce-404a-a4f7-5baed9b79888 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.684923] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.696306] env[69475]: DEBUG nova.compute.provider_tree [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.794837] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509229, 'name': ReconfigVM_Task, 'duration_secs': 0.286593} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.795109] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Reconfigured VM instance instance-0000007f to attach disk [datastore1] 2255e878-8890-46cb-a0b2-863702743691/2255e878-8890-46cb-a0b2-863702743691.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1255.795766] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36f2121b-0c81-45f5-8cb3-ed207359269e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.803577] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1255.803577] env[69475]: value = "task-3509231" [ 1255.803577] env[69475]: _type = "Task" [ 1255.803577] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.813598] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509231, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.181761] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509230, 'name': PowerOffVM_Task, 'duration_secs': 0.268395} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.182141] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1256.182745] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4120ed7-fee0-4076-9665-70288f0ec3ff {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.201647] env[69475]: DEBUG nova.scheduler.client.report [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.205193] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24628d01-c180-4139-a31d-8e667d308c37 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.248696] env[69475]: DEBUG nova.objects.instance [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid da3eff39-b80b-4574-9b07-df6f679a9f38 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1256.314723] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509231, 'name': Rename_Task, 'duration_secs': 0.14991} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.315012] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1256.315327] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8f5b830-e27b-4958-98c0-3153a8bc1db3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.323021] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1256.323021] env[69475]: value = "task-3509232" [ 1256.323021] env[69475]: _type = "Task" [ 1256.323021] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.331377] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509232, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.708985] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.717s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.715879] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Creating Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1256.716309] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-1b86d58e-19fb-4f21-9d6c-3d402eb24eb1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.726313] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1256.726313] env[69475]: value = "task-3509233" [ 1256.726313] env[69475]: _type = "Task" [ 1256.726313] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.738536] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509233, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.739769] env[69475]: INFO nova.scheduler.client.report [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleted allocations for instance cdd3f5d9-c980-41ff-92b4-14948ee00631 [ 1256.753682] env[69475]: DEBUG oslo_concurrency.lockutils [None req-20493981-b579-4d0a-a7c6-3a5bccd3420d tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.269s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1256.834462] env[69475]: DEBUG oslo_vmware.api [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509232, 'name': PowerOnVM_Task, 'duration_secs': 0.425707} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.834750] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1256.834978] env[69475]: DEBUG nova.compute.manager [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1256.835795] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d9eadb-c542-43e5-80d6-aac506309042 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.941588] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1256.941823] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.237305] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509233, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.247556] env[69475]: DEBUG oslo_concurrency.lockutils [None req-8b4959a0-1c42-4a7b-b426-0c6a99d6caba tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "cdd3f5d9-c980-41ff-92b4-14948ee00631" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.450s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.354863] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1257.355155] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1257.355338] env[69475]: DEBUG nova.objects.instance [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Trying to apply a migration context that does not seem to be set for this instance {{(pid=69475) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1257.445285] env[69475]: INFO nova.compute.manager [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Detaching volume 962264e0-88c9-4fe4-9e1c-b9215dfde6ed [ 1257.480469] env[69475]: INFO nova.virt.block_device [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Attempting to driver detach volume 962264e0-88c9-4fe4-9e1c-b9215dfde6ed from mountpoint /dev/sdb [ 1257.480678] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1257.480916] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701165', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'name': 'volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da3eff39-b80b-4574-9b07-df6f679a9f38', 'attached_at': '', 'detached_at': '', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'serial': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1257.481878] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedc1460-ccbe-4780-812d-ce1d48e427b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.504282] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92080c02-ab11-4ba6-8512-0e950c9c4dae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.512571] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05356eb3-b5ef-4708-838f-043595791dab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.533577] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8f192b-5c57-4437-aa9a-0e2b66f80e5e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.550760] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] The volume has not been displaced from its original location: [datastore2] volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed/volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1257.555925] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfiguring VM instance instance-00000079 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1257.556285] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b16037-4176-4f57-8515-1f7f6096e2e2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.576261] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1257.576261] env[69475]: value = "task-3509234" [ 1257.576261] env[69475]: _type = "Task" [ 1257.576261] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.588323] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509234, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.739829] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509233, 'name': CreateSnapshot_Task, 'duration_secs': 0.632106} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.740311] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Created Snapshot of the VM instance {{(pid=69475) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1257.740947] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4f9588c-f327-496b-ba30-7ea1621fb286 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.087273] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509234, 'name': ReconfigVM_Task, 'duration_secs': 0.239954} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.087547] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Reconfigured VM instance instance-00000079 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1258.092151] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ab60e176-163e-4b98-b57b-f3954b40f657 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.108522] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1258.108522] env[69475]: value = "task-3509235" [ 1258.108522] env[69475]: _type = "Task" [ 1258.108522] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.116496] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509235, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.260597] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Creating linked-clone VM from snapshot {{(pid=69475) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1258.260963] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8f790fee-1e37-4a13-803e-5cd3a7bad8bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.269861] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1258.269861] env[69475]: value = "task-3509236" [ 1258.269861] env[69475]: _type = "Task" [ 1258.269861] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.278372] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.330404] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "2255e878-8890-46cb-a0b2-863702743691" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.330684] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.330903] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "2255e878-8890-46cb-a0b2-863702743691-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.331152] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.331334] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.333664] env[69475]: INFO nova.compute.manager [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Terminating instance [ 1258.340152] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "51667db3-801d-4a59-b4ee-220cbf638728" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.340432] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.340703] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "51667db3-801d-4a59-b4ee-220cbf638728-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1258.340978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1258.341265] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.343595] env[69475]: INFO nova.compute.manager [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Terminating instance [ 1258.363337] env[69475]: DEBUG oslo_concurrency.lockutils [None req-51c820da-6bc1-479f-9663-4568b4471eff tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.620213] env[69475]: DEBUG oslo_vmware.api [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509235, 'name': ReconfigVM_Task, 'duration_secs': 0.14226} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.620860] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701165', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'name': 'volume-962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da3eff39-b80b-4574-9b07-df6f679a9f38', 'attached_at': '', 'detached_at': '', 'volume_id': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed', 'serial': '962264e0-88c9-4fe4-9e1c-b9215dfde6ed'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1258.780947] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.838543] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "refresh_cache-2255e878-8890-46cb-a0b2-863702743691" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.838827] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquired lock "refresh_cache-2255e878-8890-46cb-a0b2-863702743691" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.838916] env[69475]: DEBUG nova.network.neutron [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1258.848850] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "refresh_cache-51667db3-801d-4a59-b4ee-220cbf638728" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1258.848998] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquired lock "refresh_cache-51667db3-801d-4a59-b4ee-220cbf638728" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1258.849155] env[69475]: DEBUG nova.network.neutron [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Building network info cache for instance {{(pid=69475) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1259.162178] env[69475]: DEBUG nova.objects.instance [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'flavor' on Instance uuid da3eff39-b80b-4574-9b07-df6f679a9f38 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1259.281990] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 94%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.358229] env[69475]: DEBUG nova.network.neutron [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1259.367098] env[69475]: DEBUG nova.network.neutron [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1259.415067] env[69475]: DEBUG nova.network.neutron [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.416894] env[69475]: DEBUG nova.network.neutron [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.782587] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.920056] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Releasing lock "refresh_cache-51667db3-801d-4a59-b4ee-220cbf638728" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.920056] env[69475]: DEBUG nova.compute.manager [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1259.920287] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1259.920538] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Releasing lock "refresh_cache-2255e878-8890-46cb-a0b2-863702743691" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1259.920854] env[69475]: DEBUG nova.compute.manager [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1259.921043] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1259.921847] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab42eec-8158-468d-956b-f3c46fb9ee80 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.925098] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8c1334-9d4c-45fd-9859-39f6fa078a7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.932880] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.934790] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4492465-9a9b-4397-a2e5-e2a9a2923e06 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.936058] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1259.936568] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bbbf6eed-81af-4fd3-89f9-8836f189b4a3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.944175] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 
1259.944175] env[69475]: value = "task-3509238" [ 1259.944175] env[69475]: _type = "Task" [ 1259.944175] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.945199] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1259.945199] env[69475]: value = "task-3509237" [ 1259.945199] env[69475]: _type = "Task" [ 1259.945199] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.956658] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509238, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.959648] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.169971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-77b2ef39-ec07-4601-be9e-d34b47d46109 tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.285115] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.457362] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509237, 'name': PowerOffVM_Task, 'duration_secs': 0.175311} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.460243] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.460420] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1260.460665] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509238, 'name': PowerOffVM_Task, 'duration_secs': 0.204061} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.460867] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e254ee33-0b1d-49a4-8cd1-6a8b84658347 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.462285] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1260.462396] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1260.462616] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c4287a4-a714-4d62-b5cc-ef6f88ef0be6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.492265] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1260.492496] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1260.492676] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Deleting the datastore file [datastore1] 2255e878-8890-46cb-a0b2-863702743691 {{(pid=69475) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1260.493533] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b38e7f9e-d068-4e0a-ac47-6990eb53c188 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.495416] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1260.495631] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1260.495820] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleting the datastore file [datastore1] 51667db3-801d-4a59-b4ee-220cbf638728 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1260.496147] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-99dadcc2-674b-4598-ac7a-56ecb3cb89a4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.502667] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for the task: (returnval){ [ 1260.502667] env[69475]: value = "task-3509241" [ 1260.502667] env[69475]: _type = "Task" [ 1260.502667] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.504094] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for the task: (returnval){ [ 1260.504094] env[69475]: value = "task-3509242" [ 1260.504094] env[69475]: _type = "Task" [ 1260.504094] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.514317] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509241, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.517150] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509242, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.783940] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task} progress is 95%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.016886] env[69475]: DEBUG oslo_vmware.api [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Task: {'id': task-3509241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.11843} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.020532] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.020532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.020532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.020773] env[69475]: INFO nova.compute.manager [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] [instance: 2255e878-8890-46cb-a0b2-863702743691] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1261.020815] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1261.021028] env[69475]: DEBUG oslo_vmware.api [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Task: {'id': task-3509242, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.113373} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.021222] env[69475]: DEBUG nova.compute.manager [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1261.021318] env[69475]: DEBUG nova.network.neutron [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.022844] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.023037] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.023220] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.023385] env[69475]: INFO nova.compute.manager [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1261.023594] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1261.023781] env[69475]: DEBUG nova.compute.manager [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1261.023915] env[69475]: DEBUG nova.network.neutron [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.039969] env[69475]: DEBUG nova.network.neutron [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.041441] env[69475]: DEBUG nova.network.neutron [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Instance cache missing network info. 
{{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1261.199174] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.199430] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.199634] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1261.199817] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1261.199983] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1261.202166] env[69475]: INFO nova.compute.manager [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Terminating instance [ 1261.285280] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509236, 'name': CloneVM_Task, 'duration_secs': 2.669519} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.285671] env[69475]: INFO nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Created linked-clone VM from snapshot [ 1261.286226] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f7a759c-f3f7-455b-b932-0ceb6fe35d88 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.293548] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Uploading image 91f6cee1-b739-4c54-a99e-94bb9b4710c5 {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1261.319198] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1261.319198] env[69475]: value = "vm-701168" [ 1261.319198] env[69475]: _type = "VirtualMachine" [ 1261.319198] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1261.319467] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-125563cc-3c21-46b3-b9c4-968724c1e47b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.327466] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease: (returnval){ [ 1261.327466] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e60d89-5cf2-87e3-3545-8987d5f25958" [ 1261.327466] env[69475]: _type = "HttpNfcLease" [ 1261.327466] env[69475]: } obtained for exporting VM: (result){ [ 1261.327466] env[69475]: value = "vm-701168" [ 1261.327466] env[69475]: _type = "VirtualMachine" [ 1261.327466] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1261.327813] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the lease: (returnval){ [ 1261.327813] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e60d89-5cf2-87e3-3545-8987d5f25958" [ 1261.327813] env[69475]: _type = "HttpNfcLease" [ 1261.327813] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1261.334620] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1261.334620] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e60d89-5cf2-87e3-3545-8987d5f25958" [ 1261.334620] env[69475]: _type = "HttpNfcLease" [ 1261.334620] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1261.543545] env[69475]: DEBUG nova.network.neutron [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.544897] env[69475]: DEBUG nova.network.neutron [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1261.705383] env[69475]: DEBUG nova.compute.manager [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1261.705617] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1261.706575] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c62efc-3645-4a4f-bbc1-9a60814b98f1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.714741] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1261.714967] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac73c6a5-fbf0-496e-98bd-124be1e39b9d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.722449] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1261.722449] env[69475]: value = "task-3509244" [ 1261.722449] env[69475]: _type = "Task" [ 1261.722449] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.730586] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509244, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.836841] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1261.836841] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e60d89-5cf2-87e3-3545-8987d5f25958" [ 1261.836841] env[69475]: _type = "HttpNfcLease" [ 1261.836841] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1261.837075] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1261.837075] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]52e60d89-5cf2-87e3-3545-8987d5f25958" [ 1261.837075] env[69475]: _type = "HttpNfcLease" [ 1261.837075] env[69475]: }. {{(pid=69475) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1261.837828] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0277ff9e-a518-42d7-91bf-3f6e655b8700 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.845683] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1261.845900] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk for reading. {{(pid=69475) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1261.950568] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7cef7494-3c31-4140-83f1-9c432b46b49f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.046786] env[69475]: INFO nova.compute.manager [-] [instance: 51667db3-801d-4a59-b4ee-220cbf638728] Took 1.02 seconds to deallocate network for instance. [ 1262.047115] env[69475]: INFO nova.compute.manager [-] [instance: 2255e878-8890-46cb-a0b2-863702743691] Took 1.03 seconds to deallocate network for instance. [ 1262.233687] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509244, 'name': PowerOffVM_Task, 'duration_secs': 0.203846} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.233964] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1262.234148] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1262.234467] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e662b5ff-35ab-4ccf-bb14-e64dfd686979 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.315139] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1262.316019] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1262.316184] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleting the datastore file [datastore1] da3eff39-b80b-4574-9b07-df6f679a9f38 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1262.316540] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e89b7572-6584-4520-b84f-e65e6d9e9cae {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.323022] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for the task: (returnval){ [ 1262.323022] env[69475]: value = "task-3509246" [ 1262.323022] env[69475]: _type = "Task" [ 1262.323022] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1262.334410] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509246, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1262.556439] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.556940] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1262.557294] env[69475]: DEBUG nova.objects.instance [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lazy-loading 'resources' on Instance uuid 51667db3-801d-4a59-b4ee-220cbf638728 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1262.559176] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1262.833979] env[69475]: DEBUG oslo_vmware.api [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Task: {'id': task-3509246, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157743} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1262.834356] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1262.834563] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1262.834833] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1262.835094] env[69475]: INFO nova.compute.manager [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Took 1.13 seconds to destroy the instance on the hypervisor. 
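The entries above trace the usual teardown sequence for a VMware-backed instance (PowerOffVM_Task, then VirtualMachine.UnregisterVM, then FileManager.DeleteDatastoreFile_Task), each step driven by the same wait-and-poll pattern that oslo_vmware.api logs as "Waiting for the task" followed by periodic "progress is N%." lines and a final duration_secs. As a rough illustration of that pattern only, here is a minimal polling loop in Python; get_task_info and TaskState are hypothetical stand-ins, not the oslo.vmware API itself.

    import time

    class TaskState:
        RUNNING = "running"
        SUCCESS = "success"
        ERROR = "error"

    def wait_for_task(get_task_info, task_id, interval=0.5):
        """Poll a vCenter-style task until it reaches a terminal state.

        Returns the elapsed time in seconds, analogous to the
        'duration_secs' value reported in the log once a task completes.
        """
        start = time.monotonic()
        while True:
            # Hypothetical helper: returns e.g. {'state': ..., 'progress': 0-100}.
            info = get_task_info(task_id)
            if info["state"] == TaskState.SUCCESS:
                return time.monotonic() - start
            if info["state"] == TaskState.ERROR:
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            # Corresponds to the periodic "Task: {...} progress is N%." DEBUG lines above.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(interval)
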
[ 1262.835356] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1262.835564] env[69475]: DEBUG nova.compute.manager [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1262.835761] env[69475]: DEBUG nova.network.neutron [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1263.201698] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea0f82c-eca6-46df-be67-c016859b8462 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.210494] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf724f1-2c34-481c-9eab-3d8593629736 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.242585] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02905811-12c9-47ed-baaf-3975b8c365fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.252574] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad28eb28-70fb-4216-9dbd-5a6abed35b31 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.268191] env[69475]: DEBUG nova.compute.provider_tree [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1263.337716] env[69475]: DEBUG nova.compute.manager [req-cb8c091d-ba01-49fa-be02-f40d3d81bc63 req-9a4b8ed3-7d7f-4da3-8cf4-b988c7ecb2aa service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Received event network-vif-deleted-24283fcb-3bd1-46b1-a7e5-bf792688cc87 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1263.338417] env[69475]: INFO nova.compute.manager [req-cb8c091d-ba01-49fa-be02-f40d3d81bc63 req-9a4b8ed3-7d7f-4da3-8cf4-b988c7ecb2aa service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Neutron deleted interface 24283fcb-3bd1-46b1-a7e5-bf792688cc87; detaching it from the instance and deleting it from the info cache [ 1263.339058] env[69475]: DEBUG nova.network.neutron [req-cb8c091d-ba01-49fa-be02-f40d3d81bc63 req-9a4b8ed3-7d7f-4da3-8cf4-b988c7ecb2aa service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.772240] env[69475]: DEBUG nova.scheduler.client.report [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 
tempest-ServerShowV247Test-297876258-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1263.820624] env[69475]: DEBUG nova.network.neutron [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1263.841522] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-279dc177-2f38-418f-ac4f-6bcca356fc12 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.853321] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7efa03d2-0f6c-4f21-8b01-8db2cf486530 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.887657] env[69475]: DEBUG nova.compute.manager [req-cb8c091d-ba01-49fa-be02-f40d3d81bc63 req-9a4b8ed3-7d7f-4da3-8cf4-b988c7ecb2aa service nova] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Detach interface failed, port_id=24283fcb-3bd1-46b1-a7e5-bf792688cc87, reason: Instance da3eff39-b80b-4574-9b07-df6f679a9f38 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1264.278082] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.280094] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.721s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.280331] env[69475]: DEBUG nova.objects.instance [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lazy-loading 'resources' on Instance uuid 2255e878-8890-46cb-a0b2-863702743691 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1264.301092] env[69475]: INFO nova.scheduler.client.report [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Deleted allocations for instance 51667db3-801d-4a59-b4ee-220cbf638728 [ 1264.323453] env[69475]: INFO nova.compute.manager [-] [instance: da3eff39-b80b-4574-9b07-df6f679a9f38] Took 1.49 seconds to deallocate network for instance. 
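The "Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da" entries above repeat the same inventory payload for each instance being torn down; a convenient way to read that payload is to fold reserved capacity and allocation_ratio into an effective schedulable capacity per resource class. The snippet below is only a worked example of that arithmetic using the values logged here, not Nova or placement code.

    # Inventory data exactly as reported for provider dd221100-68c1-4a75-92b5-b24d81fee5da.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # Effective capacity the scheduler can hand out: (total - reserved) * allocation_ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable capacity = {capacity}")

    # -> VCPU: 192.0, MEMORY_MB: 196078.0, DISK_GB: 400.0
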
[ 1264.809099] env[69475]: DEBUG oslo_concurrency.lockutils [None req-aeb88c41-ae56-4c9b-8626-73dbd2873dae tempest-ServerShowV247Test-297876258 tempest-ServerShowV247Test-297876258-project-member] Lock "51667db3-801d-4a59-b4ee-220cbf638728" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.468s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.830991] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.902163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb253ec2-41d4-4901-af01-5951ff951a81 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.910595] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f8e91b-9f07-47d7-8d81-7d17253d8147 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.945363] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeda52ab-cc7c-4eb4-b91c-5caca0a2e210 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.954187] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd2373e-5ad0-4f4a-9432-ebeb988b2f97 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.969221] env[69475]: DEBUG nova.compute.provider_tree [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.472863] env[69475]: DEBUG nova.scheduler.client.report [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1265.977503] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.697s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1265.980055] env[69475]: 
DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.149s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1265.980319] env[69475]: DEBUG nova.objects.instance [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lazy-loading 'resources' on Instance uuid da3eff39-b80b-4574-9b07-df6f679a9f38 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.013558] env[69475]: INFO nova.scheduler.client.report [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Deleted allocations for instance 2255e878-8890-46cb-a0b2-863702743691 [ 1266.521796] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ca423c43-74f0-465e-bff3-c51233f94596 tempest-ServersListShow296Test-1815597342 tempest-ServersListShow296Test-1815597342-project-member] Lock "2255e878-8890-46cb-a0b2-863702743691" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.191s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.588125] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3bf34f-785e-4979-abc9-f8a01e1220a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.597220] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c2c859e-f7ac-41f3-be7d-508a34477e30 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.629773] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f09cdb-7197-4275-9ad6-ca36b5e889ed {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.638358] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63309c3f-5469-4bb7-83f1-bb2d92c74d87 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1266.652553] env[69475]: DEBUG nova.compute.provider_tree [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.155629] env[69475]: DEBUG nova.scheduler.client.report [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1267.660626] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.680s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.682762] env[69475]: INFO nova.scheduler.client.report [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Deleted allocations for instance da3eff39-b80b-4574-9b07-df6f679a9f38 [ 1268.190649] env[69475]: DEBUG oslo_concurrency.lockutils [None req-72cf724f-e57d-459d-830e-c7c7cc3e6c7b tempest-AttachVolumeNegativeTest-241775551 tempest-AttachVolumeNegativeTest-241775551-project-member] Lock "da3eff39-b80b-4574-9b07-df6f679a9f38" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.991s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1269.536462] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1269.537569] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71e39e5-a0e8-43bd-acf4-dc70d81effe4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.543654] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk is in state: ready. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1269.543822] env[69475]: ERROR oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk due to incomplete transfer. [ 1269.544057] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-01f1e15d-d554-4d7a-b929-9bf6f617fa90 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.551841] env[69475]: DEBUG oslo_vmware.rw_handles [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52b8ffd4-1ae8-1f56-1fdb-e069e8e100dd/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1269.552055] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Uploaded image 91f6cee1-b739-4c54-a99e-94bb9b4710c5 to the Glance image server {{(pid=69475) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1269.554389] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Destroying the VM {{(pid=69475) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1269.554648] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-d7f78f35-33d2-427e-bb7c-b0e37531d248 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.560864] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1269.560864] env[69475]: value = "task-3509248" [ 1269.560864] env[69475]: _type = "Task" [ 1269.560864] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1269.568516] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509248, 'name': Destroy_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.071186] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509248, 'name': Destroy_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1270.571374] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509248, 'name': Destroy_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.071185] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509248, 'name': Destroy_Task, 'duration_secs': 1.365032} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.071399] env[69475]: INFO nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Destroyed the VM [ 1271.071634] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleting Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1271.071880] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-cdcfc2cc-d9ff-4d2f-b1b1-abdaad3a0d33 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1271.077895] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1271.077895] env[69475]: value = "task-3509249" [ 1271.077895] env[69475]: _type = "Task" [ 1271.077895] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1271.085190] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509249, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1271.587145] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509249, 'name': RemoveSnapshot_Task, 'duration_secs': 0.338203} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1271.587467] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleted Snapshot of the VM instance {{(pid=69475) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1271.587694] env[69475]: DEBUG nova.compute.manager [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1271.588441] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6c41574-48a7-4c07-af54-3ac575d8c5c0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.100382] env[69475]: INFO nova.compute.manager [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Shelve offloading [ 1272.603703] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1272.604089] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d35df4a8-22e2-4960-82c9-a8c3850265b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.612242] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1272.612242] env[69475]: value = "task-3509251" [ 1272.612242] env[69475]: _type = "Task" [ 1272.612242] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1272.619754] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509251, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1273.123945] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] VM already powered off {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1273.124196] env[69475]: DEBUG nova.compute.manager [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1273.124963] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c64de7-21a4-4ace-a56c-1309415288ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.130521] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1273.130683] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1273.130857] env[69475]: DEBUG nova.network.neutron [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1273.830174] env[69475]: DEBUG nova.network.neutron [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", 
"ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1274.275837] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.276084] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.332568] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1274.548118] env[69475]: DEBUG nova.compute.manager [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-vif-unplugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1274.548118] env[69475]: DEBUG oslo_concurrency.lockutils [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.548118] env[69475]: DEBUG oslo_concurrency.lockutils [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.548118] env[69475]: DEBUG oslo_concurrency.lockutils [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.548118] env[69475]: DEBUG nova.compute.manager [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] No waiting events found dispatching network-vif-unplugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1274.548793] env[69475]: WARNING nova.compute.manager [req-5cb8ada9-7aa6-47ce-8701-b62757307e9e req-80bd1ab6-df10-4405-a4aa-544a31390272 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received unexpected event network-vif-unplugged-530ddca5-14b1-40c3-912c-998398a229c1 for instance 
with vm_state shelved and task_state shelving_offloading. [ 1274.631042] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1274.631942] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d7f7b9-13a3-485a-9ed5-42fefe50f45e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.639533] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1274.639747] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2eacacb0-ca4e-4e01-87f1-17a739f12120 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.700944] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1274.701175] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1274.701361] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleting the datastore file [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1274.701619] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-180936f5-1264-4df7-8ea1-56f4d7032199 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.710588] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1274.710588] env[69475]: value = "task-3509254" [ 1274.710588] env[69475]: _type = "Task" [ 1274.710588] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.722993] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509254, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.781576] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.781754] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.781929] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.782123] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.782288] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.782453] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.782593] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1274.783070] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1275.220312] env[69475]: DEBUG oslo_vmware.api [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509254, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146168} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.220612] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1275.220755] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1275.220926] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1275.242441] env[69475]: INFO nova.scheduler.client.report [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted allocations for instance 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db [ 1275.284977] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.285283] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.285455] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1275.285631] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1275.286765] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb05a03-049b-4c52-ac3a-84d556ca58c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.295312] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8d5b16-2c42-4810-9375-919ccb7bc4b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.310525] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fb7c4f-ed8b-4780-91a2-02a216d83098 {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.316722] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27936e61-4192-4b0d-85ea-f72fa64e5e7d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.354183] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179804MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1275.354344] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1275.354552] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1275.748224] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1276.378258] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 8d50b322-fa03-4e48-b74b-a63578e4701c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1276.378521] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance d63ddc35-06b3-43a2-bdd5-a91cf4047a4b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1276.378566] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance b6a785b0-7ae8-4856-b5a8-e017cfd376d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1276.378663] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1276.378769] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance edec6d3e-1881-4d6a-9e0f-c9a177e334ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1276.379071] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1276.379176] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1276.450444] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa268f0-5949-4c2b-91d5-da421b70434f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.457777] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-635bdac3-0560-453e-9fb2-ce70bd728404 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.488713] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27820d6-aab1-4716-a1da-5df502ebc8e9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.496111] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b254c09e-384b-4a42-a23e-09f2c20d40f5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.509075] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1276.574963] env[69475]: DEBUG nova.compute.manager [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-changed-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1276.575116] env[69475]: DEBUG nova.compute.manager [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing instance network info cache due to event network-changed-530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1276.575336] env[69475]: DEBUG oslo_concurrency.lockutils [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.575483] env[69475]: DEBUG oslo_concurrency.lockutils [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.575646] env[69475]: DEBUG nova.network.neutron [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1277.012432] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1277.284489] env[69475]: DEBUG nova.network.neutron [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updated VIF entry in instance network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1277.284880] env[69475]: DEBUG nova.network.neutron [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": null, "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap530ddca5-14", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.517604] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1277.517920] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.163s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.518063] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.770s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.518291] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.766019] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.787995] env[69475]: DEBUG oslo_concurrency.lockutils [req-503dbca1-d116-4629-ab1b-ce0868b00c80 req-6a9be3a4-6c36-484e-bfa5-9c99095cb74e 
service nova] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.917674] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.917977] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.918223] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1277.918406] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1277.918573] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1277.921070] env[69475]: INFO nova.compute.manager [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Terminating instance [ 1278.025626] env[69475]: DEBUG oslo_concurrency.lockutils [None req-cdb1bd03-5c88-48ce-8f71-23426d390d91 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 23.404s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1278.026633] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" acquired by 
"nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.261s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1278.026694] env[69475]: INFO nova.compute.manager [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Unshelving [ 1278.425222] env[69475]: DEBUG nova.compute.manager [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1278.425497] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1278.425792] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b9d82f86-ec56-4f9e-ad00-d56db08d8048 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.433428] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1278.433428] env[69475]: value = "task-3509255" [ 1278.433428] env[69475]: _type = "Task" [ 1278.433428] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1278.442231] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509255, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1278.943804] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509255, 'name': PowerOffVM_Task, 'duration_secs': 0.193394} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1278.944207] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1278.944913] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Volume detach. 
Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1278.945142] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701152', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'name': 'volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2', 'attached_at': '2025-04-22T09:46:18.000000', 'detached_at': '', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'serial': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1278.946424] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9962b340-a003-43c4-8e8b-aa465818dd43 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.964970] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f15ff0a9-e174-42de-8470-9b35f83785dc {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.971569] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb123f6-d800-44f3-9fad-ae856efb8fe1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.988546] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bba29b6-9521-47cb-8b18-9ac266401a47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.003806] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] The volume has not been displaced from its original location: [datastore1] volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30/volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1279.009053] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1279.009327] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ef4ad7e7-e0ad-4dbf-95e9-cb502048d3f4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.025823] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1279.025823] env[69475]: value = "task-3509256" [ 1279.025823] env[69475]: _type = "Task" [ 1279.025823] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.032963] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509256, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1279.051559] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.051819] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.052038] env[69475]: DEBUG nova.objects.instance [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'pci_requests' on Instance uuid 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.535113] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509256, 'name': ReconfigVM_Task, 'duration_secs': 0.176882} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1279.535378] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1279.540057] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbb6966c-8275-4b5c-b268-5184b101442c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.556378] env[69475]: DEBUG nova.objects.instance [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'numa_topology' on Instance uuid 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1279.557509] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1279.557509] env[69475]: value = "task-3509257" [ 1279.557509] env[69475]: _type = "Task" [ 1279.557509] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.566479] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509257, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.058663] env[69475]: INFO nova.compute.claims [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1280.070497] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509257, 'name': ReconfigVM_Task, 'duration_secs': 0.217683} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.070638] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701152', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'name': 'volume-1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2', 'attached_at': '2025-04-22T09:46:18.000000', 'detached_at': '', 'volume_id': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30', 'serial': '1a0e6d06-d9be-4cb5-8898-b91d8b1fff30'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1280.070910] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1280.072122] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ee7b96c-670d-48c0-84ff-6d749e98d85f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.079072] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1280.079650] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f8815b1-af00-4da5-ae2d-019e77c0378c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.373107] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1280.373319] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1280.373506] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore1] 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1280.373798] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9dd059c7-0082-49c7-b0b7-714ab5e22ce6 {{(pid=69475) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.380241] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1280.380241] env[69475]: value = "task-3509259" [ 1280.380241] env[69475]: _type = "Task" [ 1280.380241] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1280.388476] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509259, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.640016] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edd7bd9-2798-4dbf-9223-d01ff4c4afbb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.647470] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7886b60-4163-4ccf-ac6c-a98ab2ddfa65 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.676445] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eb86fba-83d0-4dbf-8f60-bae4018ae816 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.683028] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a59cd49-b1c0-4814-9fc1-1a782848db5a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1280.696957] env[69475]: DEBUG nova.compute.provider_tree [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1280.889639] env[69475]: DEBUG oslo_vmware.api [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509259, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.098281} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1280.889946] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1280.890172] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1280.890353] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1280.890528] env[69475]: INFO nova.compute.manager [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Took 2.47 seconds to destroy the instance on the hypervisor. [ 1280.890765] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1280.890955] env[69475]: DEBUG nova.compute.manager [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1280.891067] env[69475]: DEBUG nova.network.neutron [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1281.200104] env[69475]: DEBUG nova.scheduler.client.report [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1281.323201] env[69475]: DEBUG nova.compute.manager [req-54a2a671-7d86-46dd-a234-251322056882 req-a2005119-ce21-4ec7-9d1a-4d82e8742442 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Received event network-vif-deleted-789f3dcb-d9c8-495b-b66c-896fb31e0e63 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1281.323542] env[69475]: INFO nova.compute.manager [req-54a2a671-7d86-46dd-a234-251322056882 req-a2005119-ce21-4ec7-9d1a-4d82e8742442 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Neutron deleted interface 789f3dcb-d9c8-495b-b66c-896fb31e0e63; detaching it from the instance and deleting it from the info cache [ 1281.323826] env[69475]: DEBUG nova.network.neutron [req-54a2a671-7d86-46dd-a234-251322056882 req-a2005119-ce21-4ec7-9d1a-4d82e8742442 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.705091] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.653s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1281.745637] env[69475]: INFO nova.network.neutron [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating port 530ddca5-14b1-40c3-912c-998398a229c1 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1281.803795] env[69475]: DEBUG nova.network.neutron [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.827279] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with 
opID=oslo.vmware-4a1e1161-bb28-4a87-8672-10386eafb244 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.839501] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa9db60-ccbc-4429-b977-55b0bca7b688 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1281.869628] env[69475]: DEBUG nova.compute.manager [req-54a2a671-7d86-46dd-a234-251322056882 req-a2005119-ce21-4ec7-9d1a-4d82e8742442 service nova] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Detach interface failed, port_id=789f3dcb-d9c8-495b-b66c-896fb31e0e63, reason: Instance 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1282.306338] env[69475]: INFO nova.compute.manager [-] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Took 1.42 seconds to deallocate network for instance. [ 1282.856061] env[69475]: INFO nova.compute.manager [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Took 0.55 seconds to detach 1 volumes for instance. [ 1282.858343] env[69475]: DEBUG nova.compute.manager [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2] Deleting volume: 1a0e6d06-d9be-4cb5-8898-b91d8b1fff30 {{(pid=69475) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1283.226835] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.227053] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1283.227264] env[69475]: DEBUG nova.network.neutron [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1283.353158] env[69475]: DEBUG nova.compute.manager [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1283.353438] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.353758] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.353758] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1283.353969] env[69475]: DEBUG nova.compute.manager [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] No waiting events found dispatching network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1283.354073] env[69475]: WARNING nova.compute.manager [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received unexpected event network-vif-plugged-530ddca5-14b1-40c3-912c-998398a229c1 for instance with vm_state shelved_offloaded and task_state spawning. [ 1283.354237] env[69475]: DEBUG nova.compute.manager [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-changed-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1283.354390] env[69475]: DEBUG nova.compute.manager [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing instance network info cache due to event network-changed-530ddca5-14b1-40c3-912c-998398a229c1. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1283.354550] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Acquiring lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1283.400083] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1283.400476] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1283.400618] env[69475]: DEBUG nova.objects.instance [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'resources' on Instance uuid 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1283.931784] env[69475]: DEBUG nova.network.neutron [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1283.993488] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3e1cd2-87b7-4798-a84d-822ca44784b5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1284.001770] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-044d59bb-eb6d-4ae4-84bf-76f73f3cedb4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.032129] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74314937-ee63-42fb-9122-df0e4268d296 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.039421] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91ba29f-6d6f-476a-8a15-548a780d3e82 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.052894] env[69475]: DEBUG nova.compute.provider_tree [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1284.436887] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1284.439272] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Acquired lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1284.439459] env[69475]: DEBUG nova.network.neutron [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Refreshing network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1284.463795] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='20eac91569ba8d679869fdb599fbbb59',container_format='bare',created_at=2025-04-22T09:46:31Z,direct_url=,disk_format='vmdk',id=91f6cee1-b739-4c54-a99e-94bb9b4710c5,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-1400713814-shelved',owner='ca5098b4aae94c08b3f8ffd66aae2e2c',properties=ImageMetaProps,protected=,size=31665664,status='active',tags=,updated_at=2025-04-22T09:46:47Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1284.464039] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1284.464201] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1284.464378] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1284.464522] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1284.464667] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1284.464871] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1284.465041] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1284.465210] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1284.465370] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1284.465537] env[69475]: DEBUG nova.virt.hardware [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1284.466373] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48a305cb-d07b-4adb-8c48-349c04958f78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.473698] env[69475]: DEBUG oslo_concurrency.lockutils 
[None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1284.473918] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1284.474110] env[69475]: DEBUG nova.compute.manager [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1284.474854] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558b4910-cc6a-4183-ac6f-67b2365a7f89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.478047] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-131cde75-5f65-4d71-81fc-b96d96b22276 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.486473] env[69475]: DEBUG nova.compute.manager [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=69475) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1284.487023] env[69475]: DEBUG nova.objects.instance [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'flavor' on Instance uuid edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1284.495711] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:c6:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92fe29b3-0907-453d-aabb-5559c4bd7c0f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '530ddca5-14b1-40c3-912c-998398a229c1', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1284.503173] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1284.504072] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1284.504283] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3758d70f-9c3b-46a7-bce3-fd4f16d1f421 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.522339] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1284.522339] env[69475]: value = "task-3509261" [ 1284.522339] env[69475]: _type = "Task" [ 1284.522339] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.531830] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509261, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.556447] env[69475]: DEBUG nova.scheduler.client.report [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1285.032551] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509261, 'name': CreateVM_Task, 'duration_secs': 0.29436} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.032709] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1285.033592] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.033754] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.034154] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1285.034403] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91c01633-2db7-4932-950a-1d96518f2f95 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.039112] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1285.039112] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528711de-1b63-c2ee-bf06-4b16163a7657" [ 1285.039112] env[69475]: _type = "Task" [ 1285.039112] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.047141] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]528711de-1b63-c2ee-bf06-4b16163a7657, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.060992] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.661s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.084400] env[69475]: INFO nova.scheduler.client.report [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocations for instance 319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2 [ 1285.167235] env[69475]: DEBUG nova.network.neutron [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updated VIF entry in instance network info cache for port 530ddca5-14b1-40c3-912c-998398a229c1. {{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1285.167593] env[69475]: DEBUG nova.network.neutron [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [{"id": "530ddca5-14b1-40c3-912c-998398a229c1", "address": "fa:16:3e:6d:c6:28", "network": {"id": "73721e77-671e-4db0-b8a1-1b61bcf6e870", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-465277670-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ca5098b4aae94c08b3f8ffd66aae2e2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92fe29b3-0907-453d-aabb-5559c4bd7c0f", "external-id": "nsx-vlan-transportzone-482", "segmentation_id": 482, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap530ddca5-14", "ovs_interfaceid": "530ddca5-14b1-40c3-912c-998398a229c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1285.509265] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1285.509513] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69716faf-caee-49e2-884f-408956f104d8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.518363] env[69475]: DEBUG oslo_vmware.api [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 
tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1285.518363] env[69475]: value = "task-3509262" [ 1285.518363] env[69475]: _type = "Task" [ 1285.518363] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.525850] env[69475]: DEBUG oslo_vmware.api [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509262, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.548753] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1285.549046] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Processing image 91f6cee1-b739-4c54-a99e-94bb9b4710c5 {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1285.549530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1285.549530] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquired lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1285.549651] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1285.549832] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-511418ea-aad1-472e-8950-5058eb38708a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.560235] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1285.560419] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 
tempest-ServerActionsTestOtherB-141850940-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1285.561142] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d030d69f-0885-4aa1-8617-ea288a6b6e83 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.565922] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1285.565922] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523e9242-13c5-b72b-fa09-e746cf1bd9eb" [ 1285.565922] env[69475]: _type = "Task" [ 1285.565922] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.575411] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523e9242-13c5-b72b-fa09-e746cf1bd9eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.591551] env[69475]: DEBUG oslo_concurrency.lockutils [None req-de7d8d0f-68d8-4d4b-b4fc-b6c7eea89940 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "319fc3f2-5c1b-46f5-ac09-e3d8f61fe3e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.674s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.671412] env[69475]: DEBUG oslo_concurrency.lockutils [req-4fd75edf-dc21-44c2-bd56-325a323ceafc req-1684e99f-edf1-41ca-bc17-ef3a7aa849b4 service nova] Releasing lock "refresh_cache-15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1286.028430] env[69475]: DEBUG oslo_vmware.api [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509262, 'name': PowerOffVM_Task, 'duration_secs': 0.193415} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.028668] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.028866] env[69475]: DEBUG nova.compute.manager [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1286.029629] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1a8e98-0bb9-404d-b2e4-92e5d8f909e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.077022] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Preparing fetch location {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1286.077328] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Fetch image to [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b/OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b.vmdk {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1286.077523] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Downloading stream optimized image 91f6cee1-b739-4c54-a99e-94bb9b4710c5 to [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b/OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b.vmdk on the data store datastore1 as vApp {{(pid=69475) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1286.077717] env[69475]: DEBUG nova.virt.vmwareapi.images [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Downloading image file data 91f6cee1-b739-4c54-a99e-94bb9b4710c5 to the ESX as VM named 'OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b' {{(pid=69475) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1286.152146] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1286.152146] env[69475]: value = "resgroup-9" [ 1286.152146] env[69475]: _type = "ResourcePool" [ 1286.152146] env[69475]: }. 
{{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1286.152663] env[69475]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-e31c9e6e-3a2f-492b-8e12-955965e3dfbe {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.173808] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease: (returnval){ [ 1286.173808] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5267c3dc-45a5-dedd-9abd-45a564ff1bfa" [ 1286.173808] env[69475]: _type = "HttpNfcLease" [ 1286.173808] env[69475]: } obtained for vApp import into resource pool (val){ [ 1286.173808] env[69475]: value = "resgroup-9" [ 1286.173808] env[69475]: _type = "ResourcePool" [ 1286.173808] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1286.174150] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the lease: (returnval){ [ 1286.174150] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5267c3dc-45a5-dedd-9abd-45a564ff1bfa" [ 1286.174150] env[69475]: _type = "HttpNfcLease" [ 1286.174150] env[69475]: } to be ready. {{(pid=69475) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1286.183922] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1286.183922] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5267c3dc-45a5-dedd-9abd-45a564ff1bfa" [ 1286.183922] env[69475]: _type = "HttpNfcLease" [ 1286.183922] env[69475]: } is initializing. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1286.541760] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4ef31272-1754-4f99-83c4-3dd05a466a60 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.068s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.682613] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.682886] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.683140] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.683339] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.683522] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.685157] env[69475]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1286.685157] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5267c3dc-45a5-dedd-9abd-45a564ff1bfa" [ 1286.685157] env[69475]: _type = "HttpNfcLease" [ 1286.685157] env[69475]: } is ready. 
{{(pid=69475) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1286.685758] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1286.685758] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]5267c3dc-45a5-dedd-9abd-45a564ff1bfa" [ 1286.685758] env[69475]: _type = "HttpNfcLease" [ 1286.685758] env[69475]: }. {{(pid=69475) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1286.686274] env[69475]: INFO nova.compute.manager [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Terminating instance [ 1286.688163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000dc037-a1b3-4d27-b6f6-84b4e2f734db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.697285] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk from lease info. {{(pid=69475) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1286.697496] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk. 
{{(pid=69475) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1286.753928] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.754200] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.754401] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.754578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.754849] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1286.758370] env[69475]: INFO nova.compute.manager [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Terminating instance [ 1286.765609] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a50fbf14-224d-4b6a-9d2d-50d53c4c4a45 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.908038] env[69475]: DEBUG nova.objects.instance [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'flavor' on Instance uuid edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.193765] env[69475]: DEBUG nova.compute.manager [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 
d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1287.193942] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1287.194836] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6ce23e-3f3c-4ca6-a03d-82fac02d87a5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.203621] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1287.205039] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b3f25e3-b97b-4217-9c8b-2eef6d5bb985 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.210749] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1287.210749] env[69475]: value = "task-3509264" [ 1287.210749] env[69475]: _type = "Task" [ 1287.210749] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.220788] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509264, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.267559] env[69475]: DEBUG nova.compute.manager [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1287.267856] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1287.268891] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6effe465-26d1-43d8-b1b2-50e0bc1198b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.279278] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1287.280065] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a47c1480-5ef8-46ec-900f-770b4e2905aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.287016] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1287.287016] env[69475]: value = "task-3509265" [ 1287.287016] env[69475]: _type = "Task" [ 1287.287016] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.297471] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509265, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.417875] env[69475]: DEBUG oslo_concurrency.lockutils [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.418122] env[69475]: DEBUG oslo_concurrency.lockutils [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.418395] env[69475]: DEBUG nova.network.neutron [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1287.418596] env[69475]: DEBUG nova.objects.instance [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'info_cache' on Instance uuid edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.723047] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509264, 'name': PowerOffVM_Task, 'duration_secs': 0.201106} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.725756] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.725948] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.726264] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-198067c1-47e3-40d5-881b-6f49da17f5b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.797866] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509265, 'name': PowerOffVM_Task, 'duration_secs': 0.227966} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.798119] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1287.798288] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.798530] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff12fd1d-4dc3-4390-becd-2d462eecef78 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.860416] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.860629] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.860800] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore1] b6a785b0-7ae8-4856-b5a8-e017cfd376d8 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.861098] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7210e825-8709-4b2a-83db-17fd1c9c5171 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.868279] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1287.868279] env[69475]: value = "task-3509268" [ 1287.868279] env[69475]: _type = "Task" [ 1287.868279] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.880225] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509268, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.922644] env[69475]: DEBUG nova.objects.base [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=69475) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1287.936406] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Completed reading data from the image iterator. {{(pid=69475) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1287.936603] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1287.938013] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da217df3-d8c4-4fcb-afbd-e61b16748973 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.942532] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.942678] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.942830] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleting the datastore file [datastore2] d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.943565] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5f1dac8-5cd4-43c5-bbcf-4d45ca94dc0c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.947504] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk is in state: ready. 
{{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1287.947689] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk. {{(pid=69475) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1287.948334] env[69475]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-2502514e-6170-4765-9f26-d3f2c0b9722f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.952132] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1287.952132] env[69475]: value = "task-3509269" [ 1287.952132] env[69475]: _type = "Task" [ 1287.952132] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.959958] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509269, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.124822] env[69475]: DEBUG oslo_vmware.rw_handles [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/527cf274-3b0c-064f-325f-6417c35f3206/disk-0.vmdk. 
{{(pid=69475) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1288.125178] env[69475]: INFO nova.virt.vmwareapi.images [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Downloaded image file data 91f6cee1-b739-4c54-a99e-94bb9b4710c5 [ 1288.126163] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c959eac0-3ecf-463c-b939-ae08c6bb2e6b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.141382] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0a3b350c-f979-4e75-8d2e-406850905732 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.171138] env[69475]: INFO nova.virt.vmwareapi.images [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] The imported VM was unregistered [ 1288.173615] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Caching image {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1288.173850] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Creating directory with path [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5 {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1288.174126] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dada801a-4a88-4c36-93b0-09d596272767 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.192021] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Created directory with path [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5 {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1288.192202] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b/OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b.vmdk to [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk. 
{{(pid=69475) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1288.192448] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-f79d2c03-061f-4375-8a19-3b678ca87288 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.199116] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1288.199116] env[69475]: value = "task-3509271" [ 1288.199116] env[69475]: _type = "Task" [ 1288.199116] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.206117] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.377723] env[69475]: DEBUG oslo_vmware.api [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509268, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.164288} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.377920] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1288.378097] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1288.378302] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1288.378488] env[69475]: INFO nova.compute.manager [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1288.378726] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1288.378913] env[69475]: DEBUG nova.compute.manager [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1288.379015] env[69475]: DEBUG nova.network.neutron [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1288.463479] env[69475]: DEBUG oslo_vmware.api [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509269, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170302} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.463479] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1288.463479] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1288.463677] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1288.463735] env[69475]: INFO nova.compute.manager [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1288.464721] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1288.464721] env[69475]: DEBUG nova.compute.manager [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1288.464721] env[69475]: DEBUG nova.network.neutron [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1288.708566] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 9%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.722713] env[69475]: DEBUG nova.compute.manager [req-17b85cf5-7770-4d2f-a947-1c37d74081af req-0366b88d-966e-42e5-bc22-a7fc11ff242f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Received event network-vif-deleted-cc57d7c3-7051-4e4b-95c6-c1bffe25471e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.722914] env[69475]: INFO nova.compute.manager [req-17b85cf5-7770-4d2f-a947-1c37d74081af req-0366b88d-966e-42e5-bc22-a7fc11ff242f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Neutron deleted interface cc57d7c3-7051-4e4b-95c6-c1bffe25471e; detaching it from the instance and deleting it from the info cache [ 1288.723105] env[69475]: DEBUG nova.network.neutron [req-17b85cf5-7770-4d2f-a947-1c37d74081af req-0366b88d-966e-42e5-bc22-a7fc11ff242f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.871592] env[69475]: DEBUG nova.network.neutron [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.110177] env[69475]: DEBUG nova.network.neutron [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.175058] env[69475]: DEBUG nova.compute.manager [req-2a669816-c5a4-4e7e-a100-a0f50957a159 req-cdd1f625-451d-4bdc-8b78-54a197c16508 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Received event network-vif-deleted-face26ac-c45b-4932-b32e-bd2d172da60d {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1289.175285] env[69475]: INFO nova.compute.manager [req-2a669816-c5a4-4e7e-a100-a0f50957a159 req-cdd1f625-451d-4bdc-8b78-54a197c16508 service nova] [instance: 
d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Neutron deleted interface face26ac-c45b-4932-b32e-bd2d172da60d; detaching it from the instance and deleting it from the info cache [ 1289.175490] env[69475]: DEBUG nova.network.neutron [req-2a669816-c5a4-4e7e-a100-a0f50957a159 req-cdd1f625-451d-4bdc-8b78-54a197c16508 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.212139] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.228946] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d6744390-3608-43c2-b319-98fb9f480d09 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.241079] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57fcc160-c707-407f-a6f4-ae56b9658cc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.270559] env[69475]: DEBUG nova.compute.manager [req-17b85cf5-7770-4d2f-a947-1c37d74081af req-0366b88d-966e-42e5-bc22-a7fc11ff242f service nova] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Detach interface failed, port_id=cc57d7c3-7051-4e4b-95c6-c1bffe25471e, reason: Instance b6a785b0-7ae8-4856-b5a8-e017cfd376d8 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1289.375148] env[69475]: DEBUG oslo_concurrency.lockutils [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1289.612167] env[69475]: INFO nova.compute.manager [-] [instance: b6a785b0-7ae8-4856-b5a8-e017cfd376d8] Took 1.23 seconds to deallocate network for instance. [ 1289.654072] env[69475]: DEBUG nova.network.neutron [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.678754] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b3f2ebd-40e1-42e6-a2ba-abf612d29591 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.690265] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a153fdfc-a39f-4964-9142-fd3382f7d713 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.713288] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.725605] env[69475]: DEBUG nova.compute.manager [req-2a669816-c5a4-4e7e-a100-a0f50957a159 req-cdd1f625-451d-4bdc-8b78-54a197c16508 service nova] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Detach interface failed, port_id=face26ac-c45b-4932-b32e-bd2d172da60d, reason: Instance d63ddc35-06b3-43a2-bdd5-a91cf4047a4b could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1290.119691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.119971] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.120216] env[69475]: DEBUG nova.objects.instance [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'resources' on Instance uuid b6a785b0-7ae8-4856-b5a8-e017cfd376d8 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1290.157107] env[69475]: INFO nova.compute.manager [-] [instance: d63ddc35-06b3-43a2-bdd5-a91cf4047a4b] Took 1.69 seconds to deallocate network for instance. [ 1290.216100] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.382113] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1290.382452] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25c23658-245b-4651-8e8a-51b7e3431a47 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.392009] env[69475]: DEBUG oslo_vmware.api [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1290.392009] env[69475]: value = "task-3509272" [ 1290.392009] env[69475]: _type = "Task" [ 1290.392009] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.402612] env[69475]: DEBUG oslo_vmware.api [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509272, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.663311] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.717135] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 88%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.718927] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06938a4-5e48-41bc-815b-2c0a6693c29b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.728459] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91d0a37-621d-4e1e-acb7-6e4863f66caa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.762794] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bbd54c9-4b88-49fb-9640-3700da37d1c7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.772308] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af3750e-d483-44eb-97c6-f7c2bb256f0b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.789618] env[69475]: DEBUG nova.compute.provider_tree [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1290.905261] env[69475]: DEBUG oslo_vmware.api [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509272, 'name': PowerOnVM_Task} progress is 66%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.218846] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.293968] env[69475]: DEBUG nova.scheduler.client.report [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1291.403609] env[69475]: DEBUG oslo_vmware.api [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509272, 'name': PowerOnVM_Task} progress is 87%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.716213] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509271, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.139143} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.716475] env[69475]: INFO nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b/OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b.vmdk to [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk. [ 1291.716664] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Cleaning up location [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1291.716861] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_41148a9d-4b89-4014-9227-47609094b60b {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1291.717124] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aa151cd2-05d4-4d9b-88d0-32a2a49218c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.723749] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1291.723749] env[69475]: value = "task-3509273" [ 1291.723749] env[69475]: _type = "Task" [ 1291.723749] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1291.731114] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509273, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1291.799306] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.679s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1291.801631] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.138s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1291.801862] env[69475]: DEBUG nova.objects.instance [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'resources' on Instance uuid d63ddc35-06b3-43a2-bdd5-a91cf4047a4b {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.818018] env[69475]: INFO nova.scheduler.client.report [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocations for instance b6a785b0-7ae8-4856-b5a8-e017cfd376d8 [ 1291.904934] env[69475]: DEBUG oslo_vmware.api [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509272, 'name': PowerOnVM_Task, 'duration_secs': 1.150576} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.905289] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1291.905501] env[69475]: DEBUG nova.compute.manager [None req-babdaf3d-4f91-47d9-8eb3-fb8e397e4087 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1291.906278] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe0c675-b81e-41af-9f4d-e1b979f01634 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.233610] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509273, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.225753} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.233859] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1292.234033] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Releasing lock "[datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1292.234282] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk to [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1292.234523] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-be8180b4-cf91-4e31-953c-e94d65d6dba1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.240383] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1292.240383] env[69475]: value = "task-3509274" [ 1292.240383] env[69475]: _type = "Task" [ 1292.240383] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.247921] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.327358] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f5b6b71b-0282-43cb-a758-58d04025d5a3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "b6a785b0-7ae8-4856-b5a8-e017cfd376d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.573s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.371677] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4077d2c-1732-4eff-a2ae-335616094d9f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.383484] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d05efecd-a7f5-4c9a-946c-5e7d24e6af94 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.419574] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22554c01-4adb-4c04-9f62-b3ed414903f0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.429433] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e2b3a15-27f5-426e-b31a-bf3353adf4bb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.444359] env[69475]: DEBUG nova.compute.provider_tree [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1292.751108] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task} progress is 24%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.883303] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70a7bb56-fb2a-4eea-bf5a-a535ee975e08 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.891514] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Suspending the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1292.891863] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-518d9e07-752e-4df4-94ce-dd6187293ef0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.900699] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1292.900699] env[69475]: value = "task-3509275" [ 1292.900699] env[69475]: _type = "Task" [ 1292.900699] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.912564] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509275, 'name': SuspendVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.948494] env[69475]: DEBUG nova.scheduler.client.report [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.252703] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.416159] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509275, 'name': SuspendVM_Task} progress is 50%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.454812] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.653s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.477318] env[69475]: INFO nova.scheduler.client.report [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleted allocations for instance d63ddc35-06b3-43a2-bdd5-a91cf4047a4b [ 1293.753521] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task} progress is 60%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.912889] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509275, 'name': SuspendVM_Task} progress is 50%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.986352] env[69475]: DEBUG oslo_concurrency.lockutils [None req-1b271c5a-ba8d-442d-9063-88fc9390eda0 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "d63ddc35-06b3-43a2-bdd5-a91cf4047a4b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.303s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.255180] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.413330] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509275, 'name': SuspendVM_Task} progress is 50%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.754056] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509274, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.356221} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.754367] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/91f6cee1-b739-4c54-a99e-94bb9b4710c5/91f6cee1-b739-4c54-a99e-94bb9b4710c5.vmdk to [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1294.755135] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-685138d1-6b67-4727-a1c6-7c5991cc7d19 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.776916] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1294.777279] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f147110-14cb-4ee1-80a4-3a8293984408 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.797635] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1294.797635] env[69475]: value = "task-3509277" [ 1294.797635] env[69475]: _type = "Task" [ 1294.797635] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.805445] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509277, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.911820] env[69475]: DEBUG oslo_vmware.api [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509275, 'name': SuspendVM_Task, 'duration_secs': 1.846436} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.912168] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Suspended the VM {{(pid=69475) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1294.912383] env[69475]: DEBUG nova.compute.manager [None req-771ff459-c586-41bd-bb9e-59f63bc95dd2 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1294.913183] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae61a715-7209-45e7-8885-dacf05d18e6f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.308977] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509277, 'name': ReconfigVM_Task, 'duration_secs': 0.310666} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.309274] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db/15d6546a-b73d-4e7c-b90b-1cd34a5eb1db.vmdk or device None with type streamOptimized {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1295.309932] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fed273d2-b7c6-4f0a-b8f8-ecf894082d14 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.317885] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1295.317885] env[69475]: value = "task-3509278" [ 1295.317885] env[69475]: _type = "Task" [ 1295.317885] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.326596] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509278, 'name': Rename_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.831345] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509278, 'name': Rename_Task, 'duration_secs': 0.149936} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.831345] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1295.831345] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d336a18b-46bd-4899-b7ed-2770e7582a1e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.839635] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1295.839635] env[69475]: value = "task-3509279" [ 1295.839635] env[69475]: _type = "Task" [ 1295.839635] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.847442] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509279, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.054377] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.054604] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.350415] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509279, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.392810] env[69475]: INFO nova.compute.manager [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Resuming [ 1296.393413] env[69475]: DEBUG nova.objects.instance [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'flavor' on Instance uuid edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1296.556839] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Starting instance... {{(pid=69475) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1296.611776] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "8d50b322-fa03-4e48-b74b-a63578e4701c" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.611978] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.612204] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1296.612386] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1296.612555] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.614491] env[69475]: INFO nova.compute.manager [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 
tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Terminating instance [ 1296.851069] env[69475]: DEBUG oslo_vmware.api [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509279, 'name': PowerOnVM_Task, 'duration_secs': 1.004478} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.851069] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1296.952154] env[69475]: DEBUG nova.compute.manager [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1296.953094] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5479799a-0ee3-4d79-88ff-0421caa21303 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.080578] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.080851] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.082377] env[69475]: INFO nova.compute.claims [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1297.118372] env[69475]: DEBUG nova.compute.manager [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Start destroying the instance on the hypervisor. 
{{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1297.118587] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1297.119844] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cae68da-d1df-4c0c-a86f-4ac886a9e540 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.130213] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1297.130446] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cc695d60-970f-472b-83ef-1f7bf93d1662 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.137318] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1297.137318] env[69475]: value = "task-3509280" [ 1297.137318] env[69475]: _type = "Task" [ 1297.137318] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.145810] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509280, 'name': PowerOffVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.469585] env[69475]: DEBUG oslo_concurrency.lockutils [None req-0f574a1c-e06a-42dd-a898-cebeca7025a6 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.443s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.647140] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509280, 'name': PowerOffVM_Task, 'duration_secs': 0.224433} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1297.647404] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1297.647601] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1297.647863] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1172af6c-8fb1-4fb7-bc05-8442b03880b3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.714602] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1297.714853] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1297.715072] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleting the datastore file [datastore2] 8d50b322-fa03-4e48-b74b-a63578e4701c {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1297.715361] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2e1804a5-1400-437b-af71-5d8a9a75112c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.723061] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for the task: (returnval){ [ 1297.723061] env[69475]: value = "task-3509282" [ 1297.723061] env[69475]: _type = "Task" [ 1297.723061] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.731651] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509282, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1297.903361] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1297.903677] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquired lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1297.903722] env[69475]: DEBUG nova.network.neutron [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Building network info cache for instance {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1298.156395] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-973afca0-8236-4ea9-86b0-43651dabd9eb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.165015] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970762eb-30f1-4639-9f0a-05059c78a7b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.198095] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75bc9f9-9f67-4530-84bc-4599458e6583 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.206240] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ced0bce-3696-449e-8a1b-8a5272637ebf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.219943] env[69475]: DEBUG nova.compute.provider_tree [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1298.232341] env[69475]: DEBUG oslo_vmware.api [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Task: {'id': task-3509282, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14186} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1298.232572] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1298.232753] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1298.232925] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1298.233107] env[69475]: INFO nova.compute.manager [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1298.233339] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1298.233519] env[69475]: DEBUG nova.compute.manager [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1298.233611] env[69475]: DEBUG nova.network.neutron [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1298.591980] env[69475]: DEBUG nova.compute.manager [req-0d3e9faf-fb47-45f1-8442-d1e7cce473d1 req-14217f64-f23c-441c-8ed9-81dc7570ec33 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Received event network-vif-deleted-ed004f95-f0d0-434e-a13d-54bff688d74e {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.592210] env[69475]: INFO nova.compute.manager [req-0d3e9faf-fb47-45f1-8442-d1e7cce473d1 req-14217f64-f23c-441c-8ed9-81dc7570ec33 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Neutron deleted interface ed004f95-f0d0-434e-a13d-54bff688d74e; detaching it from the instance and deleting it from the info cache [ 1298.592381] env[69475]: DEBUG nova.network.neutron [req-0d3e9faf-fb47-45f1-8442-d1e7cce473d1 req-14217f64-f23c-441c-8ed9-81dc7570ec33 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.595088] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.595309] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.595505] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.595684] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.595851] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 
tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.598021] env[69475]: INFO nova.compute.manager [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Terminating instance [ 1298.720645] env[69475]: DEBUG nova.network.neutron [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [{"id": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "address": "fa:16:3e:27:16:3b", "network": {"id": "f097b098-a5a6-46e3-938a-185d8b67a86f", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-602979701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.139", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "de2b24bdabce45a7884bdce4ed781c79", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1243d440-89", "ovs_interfaceid": "1243d440-897a-44e6-8f1e-2fbd61a5922f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.722513] env[69475]: DEBUG nova.scheduler.client.report [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1299.073279] env[69475]: DEBUG nova.network.neutron [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1299.095387] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ae2b9ba-4851-42d8-bc31-d1d97366bf35 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1299.106553] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f8b53b-9170-46d9-bf98-cedf0e65daa6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.116786] env[69475]: DEBUG nova.compute.manager [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1299.116970] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1299.118073] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82da28d0-9515-4c70-9efc-d1ae3fb2264f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.125126] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1299.125396] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcd8c199-20db-4374-84da-b1eb792d8a63 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.138420] env[69475]: DEBUG nova.compute.manager [req-0d3e9faf-fb47-45f1-8442-d1e7cce473d1 req-14217f64-f23c-441c-8ed9-81dc7570ec33 service nova] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Detach interface failed, port_id=ed004f95-f0d0-434e-a13d-54bff688d74e, reason: Instance 8d50b322-fa03-4e48-b74b-a63578e4701c could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1299.140144] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1299.140144] env[69475]: value = "task-3509283" [ 1299.140144] env[69475]: _type = "Task" [ 1299.140144] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.149451] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.226220] env[69475]: DEBUG oslo_concurrency.lockutils [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Releasing lock "refresh_cache-edec6d3e-1881-4d6a-9e0f-c9a177e334ad" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1299.227050] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1299.227561] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Start building networks asynchronously for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1299.231071] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29e329ff-bbf3-4929-ab3d-56eb6bb2afc0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.238979] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Resuming the VM {{(pid=69475) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1299.239256] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-31db4a4a-d341-4861-8e62-0f22a45cd115 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.247673] env[69475]: DEBUG oslo_vmware.api [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1299.247673] env[69475]: value = "task-3509284" [ 1299.247673] env[69475]: _type = "Task" [ 1299.247673] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.256135] env[69475]: DEBUG oslo_vmware.api [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.576446] env[69475]: INFO nova.compute.manager [-] [instance: 8d50b322-fa03-4e48-b74b-a63578e4701c] Took 1.34 seconds to deallocate network for instance. [ 1299.651381] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509283, 'name': PowerOffVM_Task, 'duration_secs': 0.23274} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.651654] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1299.651824] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1299.652115] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b1e6b94-81d1-44bc-94c7-a44abc8cf3a9 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.720806] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1299.721077] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1299.721343] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleting the datastore file [datastore1] 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1299.721631] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4a9e5066-e890-49ff-9405-2face8cdbe4b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.731693] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for the task: (returnval){ [ 1299.731693] env[69475]: value = "task-3509286" [ 1299.731693] env[69475]: _type = "Task" [ 1299.731693] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.736667] env[69475]: DEBUG nova.compute.utils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1299.738183] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Allocating IP information in the background. 
{{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1299.738377] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] allocate_for_instance() {{(pid=69475) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1299.746051] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509286, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.760456] env[69475]: DEBUG oslo_vmware.api [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509284, 'name': PowerOnVM_Task} progress is 100%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.777228] env[69475]: DEBUG nova.policy [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b42f0b943ec4de7ac656612ca56a34b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9101c50cbfe74c99b1e1a528cb5b5994', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=69475) authorize /opt/stack/nova/nova/policy.py:192}} [ 1300.038940] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Successfully created port: 9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1300.083392] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.083852] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.083931] env[69475]: DEBUG nova.objects.instance [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lazy-loading 'resources' on Instance uuid 8d50b322-fa03-4e48-b74b-a63578e4701c {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1300.243222] env[69475]: DEBUG nova.compute.manager 
[None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Start building block device mappings for instance. {{(pid=69475) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1300.245988] env[69475]: DEBUG oslo_vmware.api [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Task: {'id': task-3509286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.501624} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.250080] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1300.250080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1300.250080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1300.250080] env[69475]: INFO nova.compute.manager [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1300.250080] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1300.250080] env[69475]: DEBUG nova.compute.manager [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1300.250080] env[69475]: DEBUG nova.network.neutron [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1300.258799] env[69475]: DEBUG oslo_vmware.api [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509284, 'name': PowerOnVM_Task, 'duration_secs': 0.530431} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1300.259062] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Resumed the VM {{(pid=69475) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1300.261661] env[69475]: DEBUG nova.compute.manager [None req-f365beca-8f08-452a-afdf-b4eb4f752a34 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1300.261661] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ee8968-f609-42c2-a0e9-bad4aaef01c8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.670534] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d70dd2-7e91-4be9-8c6d-eb5f5b095a89 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.681938] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ee667d-16db-419a-9a91-ea68d821dfc3 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.716908] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-705cdcd1-935e-4d7b-9c3f-656d762b42c5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.723440] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8cfe2c-333c-4c68-ab44-054dc8d9313a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.737749] env[69475]: DEBUG nova.compute.provider_tree [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.120810] env[69475]: DEBUG nova.compute.manager [req-0300d31b-7851-4163-8dce-0db7cd70d163 req-fd0a5ba9-e3bd-480a-aa99-6904edda8438 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Received event network-vif-deleted-530ddca5-14b1-40c3-912c-998398a229c1 {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1301.121065] env[69475]: INFO nova.compute.manager [req-0300d31b-7851-4163-8dce-0db7cd70d163 req-fd0a5ba9-e3bd-480a-aa99-6904edda8438 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Neutron deleted interface 530ddca5-14b1-40c3-912c-998398a229c1; detaching it from the instance and deleting it from the info cache [ 1301.121282] env[69475]: DEBUG nova.network.neutron [req-0300d31b-7851-4163-8dce-0db7cd70d163 req-fd0a5ba9-e3bd-480a-aa99-6904edda8438 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1301.240847] env[69475]: DEBUG nova.scheduler.client.report [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1301.253582] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Start spawning the instance on the hypervisor. {{(pid=69475) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1301.284428] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-22T09:33:40Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-22T09:33:22Z,direct_url=,disk_format='vmdk',id=afa9d32c-9f39-44fb-bf3b-50d35842a59f,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='34b98c3722d74fe5827d9c95c1df7a95',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-22T09:33:22Z,virtual_size=,visibility=), allow threads: False {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.284677] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Flavor limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:362}} [ 1301.284833] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Image limits 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:366}} [ 1301.285026] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Flavor pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:402}} [ 1301.285186] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Image pref 0:0:0 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:406}} [ 1301.285333] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e 
tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=69475) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:444}} [ 1301.285540] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:583}} [ 1301.285698] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:485}} [ 1301.285865] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Got 1 possible topologies {{(pid=69475) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:515}} [ 1301.286048] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:589}} [ 1301.286231] env[69475]: DEBUG nova.virt.hardware [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=69475) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:591}} [ 1301.287123] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4233c74-0143-45a3-8b20-226ee8bd4dc5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.297469] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485969c3-1d69-41da-8d23-d722bee4ea53 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.332035] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.332277] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.332477] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1301.332654] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1301.332815] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.334742] env[69475]: INFO nova.compute.manager [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Terminating instance [ 1301.605307] env[69475]: DEBUG nova.network.neutron [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1301.624502] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f194120b-4af5-48b0-b573-ffb5d07bb731 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.635912] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4feef7ba-4aba-4a80-a894-2198e82b84bd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.662766] env[69475]: DEBUG nova.compute.manager [req-0300d31b-7851-4163-8dce-0db7cd70d163 req-fd0a5ba9-e3bd-480a-aa99-6904edda8438 service nova] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Detach interface failed, port_id=530ddca5-14b1-40c3-912c-998398a229c1, reason: Instance 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db could not be found. 
{{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1301.745900] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.662s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.771570] env[69475]: INFO nova.scheduler.client.report [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Deleted allocations for instance 8d50b322-fa03-4e48-b74b-a63578e4701c [ 1301.838544] env[69475]: DEBUG nova.compute.manager [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1301.838864] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1301.840096] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9763119c-2516-492d-8e11-6331066c7fc8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.850203] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1301.850503] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-244b6e94-10a6-4ca7-b138-bcf9ea7a843d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.856246] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1301.856246] env[69475]: value = "task-3509287" [ 1301.856246] env[69475]: _type = "Task" [ 1301.856246] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1301.864410] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509287, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1301.932466] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Successfully updated port: 9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1302.108406] env[69475]: INFO nova.compute.manager [-] [instance: 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db] Took 1.86 seconds to deallocate network for instance. [ 1302.280828] env[69475]: DEBUG oslo_concurrency.lockutils [None req-c2ac84fa-1654-47d5-92ce-4f906c16a7f3 tempest-ServerActionsTestOtherA-1764177117 tempest-ServerActionsTestOtherA-1764177117-project-member] Lock "8d50b322-fa03-4e48-b74b-a63578e4701c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.669s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.366391] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509287, 'name': PowerOffVM_Task, 'duration_secs': 0.194345} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1302.366583] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1302.366746] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1302.367024] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e039498c-30ae-4226-b889-0e8c6e9b415f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.435125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1302.435125] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1302.435347] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Building network info cache for instance {{(pid=69475) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1302.509463] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1302.509716] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Deleting contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1302.509861] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleting the datastore file [datastore1] edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1302.510136] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6165413c-9889-4b7d-a1ef-6e52eb94cfe1 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.519037] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for the task: (returnval){ [ 1302.519037] env[69475]: value = "task-3509289" [ 1302.519037] env[69475]: _type = "Task" [ 1302.519037] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1302.527141] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509289, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1302.614879] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1302.615203] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1302.615433] env[69475]: DEBUG nova.objects.instance [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lazy-loading 'resources' on Instance uuid 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1302.981718] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Instance cache missing network info. {{(pid=69475) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1303.029624] env[69475]: DEBUG oslo_vmware.api [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Task: {'id': task-3509289, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134515} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1303.034027] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1303.034027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Deleted contents of the VM from datastore datastore1 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1303.034027] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1303.034027] env[69475]: INFO nova.compute.manager [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1303.034027] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1303.034027] env[69475]: DEBUG nova.compute.manager [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1303.034027] env[69475]: DEBUG nova.network.neutron [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1303.182921] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29779b9a-2f28-4bc2-bb74-fe9eabc027b0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.192019] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eb415fa-02ce-484e-a04e-be41b133621c {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.226481] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a9c3e4a-ccdc-49b7-ab07-6be6d707b802 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.234413] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb215566-ef2e-4ea6-a2ea-dede42aa7536 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.249108] env[69475]: DEBUG nova.compute.provider_tree [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1303.278188] env[69475]: DEBUG nova.network.neutron [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating instance_info_cache with network_info: [{"id": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "address": "fa:16:3e:e5:fb:31", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", 
"segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9906eb52-bc", "ovs_interfaceid": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.318777] env[69475]: DEBUG nova.compute.manager [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Received event network-vif-plugged-9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1303.319068] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1303.320391] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Lock "2a07fd42-da76-4661-9563-d52afe52f308-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1303.320681] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Lock "2a07fd42-da76-4661-9563-d52afe52f308-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1303.320881] env[69475]: DEBUG nova.compute.manager [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] No waiting events found dispatching network-vif-plugged-9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1303.321114] env[69475]: WARNING nova.compute.manager [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Received unexpected event network-vif-plugged-9906eb52-bc0b-4c1b-88a5-119b7d5794da for instance with vm_state building and task_state spawning. [ 1303.321304] env[69475]: DEBUG nova.compute.manager [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Received event network-changed-9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1303.321514] env[69475]: DEBUG nova.compute.manager [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Refreshing instance network info cache due to event network-changed-9906eb52-bc0b-4c1b-88a5-119b7d5794da. 
{{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1303.321717] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Acquiring lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1303.752157] env[69475]: DEBUG nova.scheduler.client.report [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1303.781539] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1303.781865] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Instance network_info: |[{"id": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "address": "fa:16:3e:e5:fb:31", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9906eb52-bc", "ovs_interfaceid": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=69475) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1303.782418] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Acquired lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1303.782602] env[69475]: DEBUG nova.network.neutron [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service 
nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Refreshing network info cache for port 9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1303.783758] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:fb:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55520f67-d092-4eb7-940f-d7cceaa1ca1c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9906eb52-bc0b-4c1b-88a5-119b7d5794da', 'vif_model': 'vmxnet3'}] {{(pid=69475) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1303.793806] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1303.797903] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Creating VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1303.799058] env[69475]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d4e990c-6a09-4ddc-81d8-e6003eeab1db {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.826473] env[69475]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.826473] env[69475]: value = "task-3509290" [ 1303.826473] env[69475]: _type = "Task" [ 1303.826473] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.835157] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509290, 'name': CreateVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.007041] env[69475]: DEBUG nova.network.neutron [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updated VIF entry in instance network info cache for port 9906eb52-bc0b-4c1b-88a5-119b7d5794da. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1304.007432] env[69475]: DEBUG nova.network.neutron [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating instance_info_cache with network_info: [{"id": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "address": "fa:16:3e:e5:fb:31", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9906eb52-bc", "ovs_interfaceid": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.142167] env[69475]: DEBUG nova.network.neutron [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1304.257322] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.642s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.282938] env[69475]: INFO nova.scheduler.client.report [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Deleted allocations for instance 15d6546a-b73d-4e7c-b90b-1cd34a5eb1db [ 1304.337553] env[69475]: DEBUG oslo_vmware.api [-] Task: {'id': task-3509290, 'name': CreateVM_Task, 'duration_secs': 0.326868} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.337889] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Created VM on the ESX host {{(pid=69475) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1304.338625] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.338794] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.339126] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1304.339418] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de7c54cb-107e-4c8d-9001-a7a862860783 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.344740] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1304.344740] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a3fe7-c5a4-6fed-b13f-d6fce3cd57ec" [ 1304.344740] env[69475]: _type = "Task" [ 1304.344740] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.353494] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a3fe7-c5a4-6fed-b13f-d6fce3cd57ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.510313] env[69475]: DEBUG oslo_concurrency.lockutils [req-fd225233-c1c2-4bdc-8468-76a67e43cc22 req-2c65af85-2278-4204-be2a-bd06145ddbed service nova] Releasing lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.648378] env[69475]: INFO nova.compute.manager [-] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Took 1.62 seconds to deallocate network for instance. 
[ 1304.792049] env[69475]: DEBUG oslo_concurrency.lockutils [None req-891367d1-2559-4578-9287-5a5de79fe359 tempest-ServerActionsTestOtherB-141850940 tempest-ServerActionsTestOtherB-141850940-project-member] Lock "15d6546a-b73d-4e7c-b90b-1cd34a5eb1db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.197s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.865570] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]522a3fe7-c5a4-6fed-b13f-d6fce3cd57ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010935} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.865570] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1304.865570] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Processing image afa9d32c-9f39-44fb-bf3b-50d35842a59f {{(pid=69475) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1304.866307] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1304.866753] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquired lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1304.867047] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1304.868745] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c711b08-2dc4-4d20-8f0a-602c8f30cdc7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.882600] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=69475) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
1304.882600] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=69475) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1304.882600] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1c533a5-0226-461f-bc06-158d9f382827 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.893888] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1304.893888] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e9b38-861f-86b6-bd86-81e9358a56fb" [ 1304.893888] env[69475]: _type = "Task" [ 1304.893888] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.906455] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]521e9b38-861f-86b6-bd86-81e9358a56fb, 'name': SearchDatastore_Task, 'duration_secs': 0.010515} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.907243] env[69475]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-97ca0480-1f7f-4e53-855c-94dcadc258fa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.913520] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1304.913520] env[69475]: value = "session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523eeed0-c36d-cc1a-7e82-dc69e1f213b2" [ 1304.913520] env[69475]: _type = "Task" [ 1304.913520] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.922518] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523eeed0-c36d-cc1a-7e82-dc69e1f213b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.158360] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.158647] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.159020] env[69475]: DEBUG nova.objects.instance [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lazy-loading 'resources' on Instance uuid edec6d3e-1881-4d6a-9e0f-c9a177e334ad {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1305.353134] env[69475]: DEBUG nova.compute.manager [req-44c5ae4c-319d-4bc8-acb5-dcb8f3158d1a req-9a008278-8afc-4a49-b4a2-eace7321f736 service nova] [instance: edec6d3e-1881-4d6a-9e0f-c9a177e334ad] Received event network-vif-deleted-1243d440-897a-44e6-8f1e-2fbd61a5922f {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1305.425558] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': session[52fd7516-fb2a-7eeb-217f-3ba3e6f8e96b]523eeed0-c36d-cc1a-7e82-dc69e1f213b2, 'name': SearchDatastore_Task, 'duration_secs': 0.010677} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.425829] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Releasing lock "[datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1305.426099] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2a07fd42-da76-4661-9563-d52afe52f308/2a07fd42-da76-4661-9563-d52afe52f308.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1305.426344] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-238e1491-671e-4df1-9d22-1257468c662f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.434266] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1305.434266] env[69475]: value = "task-3509292" [ 1305.434266] env[69475]: _type = "Task" [ 1305.434266] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.442717] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509292, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.714919] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3e7288a-c292-452a-8a95-c79481e2dc01 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.725266] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb8b2411-8e05-4462-a04d-aa2b04044b9a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.758089] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338c9b9-eb03-4c11-a242-062f620b3943 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.766791] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f59b328d-2110-4023-8057-ef84ea3b1a51 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.781608] env[69475]: DEBUG nova.compute.provider_tree [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1305.944748] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509292, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.480197} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1305.945038] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/afa9d32c-9f39-44fb-bf3b-50d35842a59f/afa9d32c-9f39-44fb-bf3b-50d35842a59f.vmdk to [datastore2] 2a07fd42-da76-4661-9563-d52afe52f308/2a07fd42-da76-4661-9563-d52afe52f308.vmdk {{(pid=69475) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1305.945264] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Extending root virtual disk to 1048576 {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1305.945519] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7930d3bb-4b67-42a1-a15a-d3d601ed6821 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.953511] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1305.953511] env[69475]: value = "task-3509293" [ 1305.953511] env[69475]: _type = "Task" [ 1305.953511] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.963478] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509293, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.285432] env[69475]: DEBUG nova.scheduler.client.report [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1306.463608] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509293, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069306} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.463931] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Extended root virtual disk {{(pid=69475) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1306.464632] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca0fc51b-55b0-4d53-ba4c-d24ab76d9860 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.487142] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfiguring VM instance instance-00000080 to attach disk [datastore2] 2a07fd42-da76-4661-9563-d52afe52f308/2a07fd42-da76-4661-9563-d52afe52f308.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1306.487383] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa9ac12a-a947-4a0d-aa9a-c93c32b58bd4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.509131] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1306.509131] env[69475]: value = "task-3509294" [ 1306.509131] env[69475]: _type = "Task" [ 1306.509131] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.517380] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509294, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.790316] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.631s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1306.810337] env[69475]: INFO nova.scheduler.client.report [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Deleted allocations for instance edec6d3e-1881-4d6a-9e0f-c9a177e334ad [ 1307.019875] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509294, 'name': ReconfigVM_Task, 'duration_secs': 0.317425} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.020180] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfigured VM instance instance-00000080 to attach disk [datastore2] 2a07fd42-da76-4661-9563-d52afe52f308/2a07fd42-da76-4661-9563-d52afe52f308.vmdk or device None with type sparse {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1307.020804] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-453ad8ab-e510-484a-9223-88b8d9b4b985 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.029093] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1307.029093] env[69475]: value = "task-3509295" [ 1307.029093] env[69475]: _type = "Task" [ 1307.029093] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.037530] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509295, 'name': Rename_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.318268] env[69475]: DEBUG oslo_concurrency.lockutils [None req-ce53be2c-ffa2-40bd-a46d-b79502abf477 tempest-ServerActionsTestJSON-1088981625 tempest-ServerActionsTestJSON-1088981625-project-member] Lock "edec6d3e-1881-4d6a-9e0f-c9a177e334ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.986s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1307.539299] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509295, 'name': Rename_Task, 'duration_secs': 0.148085} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.539621] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Powering on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1307.539857] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-260b9156-5428-4304-b0ee-363e7615c91e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.546817] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1307.546817] env[69475]: value = "task-3509296" [ 1307.546817] env[69475]: _type = "Task" [ 1307.546817] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.554419] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.057250] env[69475]: DEBUG oslo_vmware.api [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509296, 'name': PowerOnVM_Task, 'duration_secs': 0.430345} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.057531] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Powered on the VM {{(pid=69475) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1308.057737] env[69475]: INFO nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Took 6.80 seconds to spawn the instance on the hypervisor. 
[ 1308.057912] env[69475]: DEBUG nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Checking state {{(pid=69475) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1308.058746] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5f55d4b-060c-4b1a-9794-94a382e7c424 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.577838] env[69475]: INFO nova.compute.manager [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Took 11.52 seconds to build instance. [ 1309.079792] env[69475]: DEBUG oslo_concurrency.lockutils [None req-bac3fa06-d472-4082-a5e0-fe981455df0e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.025s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1309.418655] env[69475]: DEBUG nova.compute.manager [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Received event network-changed-9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1309.418949] env[69475]: DEBUG nova.compute.manager [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Refreshing instance network info cache due to event network-changed-9906eb52-bc0b-4c1b-88a5-119b7d5794da. {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1309.419224] env[69475]: DEBUG oslo_concurrency.lockutils [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] Acquiring lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1309.419444] env[69475]: DEBUG oslo_concurrency.lockutils [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] Acquired lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1309.419658] env[69475]: DEBUG nova.network.neutron [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Refreshing network info cache for port 9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1310.151938] env[69475]: DEBUG nova.network.neutron [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updated VIF entry in instance network info cache for port 9906eb52-bc0b-4c1b-88a5-119b7d5794da. 
{{(pid=69475) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1310.152340] env[69475]: DEBUG nova.network.neutron [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating instance_info_cache with network_info: [{"id": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "address": "fa:16:3e:e5:fb:31", "network": {"id": "5996fb14-ef37-4ad6-bdc7-a1fe757f6765", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-259172914-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9101c50cbfe74c99b1e1a528cb5b5994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55520f67-d092-4eb7-940f-d7cceaa1ca1c", "external-id": "nsx-vlan-transportzone-717", "segmentation_id": 717, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9906eb52-bc", "ovs_interfaceid": "9906eb52-bc0b-4c1b-88a5-119b7d5794da", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1310.656201] env[69475]: DEBUG oslo_concurrency.lockutils [req-97524bed-66cf-495c-9d26-e0939a224070 req-22efc194-af80-4739-ad68-b4e16ac43d7f service nova] Releasing lock "refresh_cache-2a07fd42-da76-4661-9563-d52afe52f308" {{(pid=69475) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1337.522193] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.522554] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.522594] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.522755] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.522910] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.523081] env[69475]: DEBUG 
oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.523236] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1337.523375] env[69475]: DEBUG nova.compute.manager [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1337.523519] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1338.027191] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.027567] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1338.027790] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1338.027996] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1338.029278] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8c61d9-327f-4b0e-b079-eff9def62cdb {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.038293] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fc690d-f4fc-4d7b-aab6-d92170ae5ca4 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.052041] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d416cc7-0ed0-45cf-a734-47b7017fd8c2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.058378] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f710f90-a57c-48ae-835a-3cc1574dbc8c {{(pid=69475) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1338.089966] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180081MB free_disk=89GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1338.089966] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1338.089966] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.113348] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Instance 2a07fd42-da76-4661-9563-d52afe52f308 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=69475) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1737}} [ 1339.113617] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1339.113735] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1339.139730] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847fea78-a7d2-4e6a-95af-ba6d0f7c2528 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.147238] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcaf376e-42f7-4c9e-ae62-cf5524e18b3a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.176348] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7be8cc48-3e15-49e4-b631-dadb27a93e99 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.182820] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f901bb2e-95aa-45fa-9c38-eb54e45635aa {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1339.195247] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1339.698059] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1340.203114] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1340.203459] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.114s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1346.862024] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1346.862289] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1347.365982] env[69475]: DEBUG nova.compute.utils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1347.869394] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1348.925134] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1348.925530] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1348.925743] env[69475]: INFO nova.compute.manager [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Attaching volume 3983ee6b-c844-4c71-b5b5-370ff456e63b to /dev/sdb [ 1348.956350] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b49e88-576e-490f-a3a6-625ed9d4c4dd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.963702] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b403fc21-db84-4599-8f40-9d0bc78c5521 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.976424] env[69475]: DEBUG nova.virt.block_device [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating existing volume attachment record: 05827eef-591f-4a67-ad44-e67cc727d169 {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1353.523654] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1353.523901] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701172', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'name': 'volume-3983ee6b-c844-4c71-b5b5-370ff456e63b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'serial': '3983ee6b-c844-4c71-b5b5-370ff456e63b'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1353.524849] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b743e3fb-cc7d-4fa4-8725-6959720f4f85 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.541280] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae859ba-1397-409d-8cb9-4ecb3cc5d315 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.566449] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] volume-3983ee6b-c844-4c71-b5b5-370ff456e63b/volume-3983ee6b-c844-4c71-b5b5-370ff456e63b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1353.566839] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-05137f63-db6f-44a6-bd78-4357d9a6f9a7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.584842] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1353.584842] env[69475]: value = "task-3509299" [ 1353.584842] env[69475]: _type = "Task" [ 1353.584842] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.595066] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509299, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.094672] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509299, 'name': ReconfigVM_Task, 'duration_secs': 0.328205} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.095034] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfigured VM instance instance-00000080 to attach disk [datastore1] volume-3983ee6b-c844-4c71-b5b5-370ff456e63b/volume-3983ee6b-c844-4c71-b5b5-370ff456e63b.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1354.099743] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7af982a-57aa-4783-aebf-f9ab765cbfb6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.113335] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1354.113335] env[69475]: value = "task-3509300" [ 1354.113335] env[69475]: _type = "Task" [ 1354.113335] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.120611] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509300, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.623556] env[69475]: DEBUG oslo_vmware.api [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509300, 'name': ReconfigVM_Task, 'duration_secs': 0.151382} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.623857] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701172', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'name': 'volume-3983ee6b-c844-4c71-b5b5-370ff456e63b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'serial': '3983ee6b-c844-4c71-b5b5-370ff456e63b'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1355.657873] env[69475]: DEBUG nova.objects.instance [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1356.164388] env[69475]: DEBUG oslo_concurrency.lockutils [None req-3c3a69d7-1e4f-45fb-963e-8a201ed01667 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.239s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1356.971217] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1356.971473] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1357.474539] env[69475]: DEBUG nova.compute.utils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Using /dev/sd instead of None {{(pid=69475) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1357.977238] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.031658] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac 
tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.032034] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.032159] env[69475]: INFO nova.compute.manager [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Attaching volume 592e1ea7-35fe-4819-9f91-50f12b0c11bd to /dev/sdc [ 1359.062568] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3009127-9907-4564-a989-2b7a8b5c7500 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.069707] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89e9a720-b25f-421e-926b-826d24e9734b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.083049] env[69475]: DEBUG nova.virt.block_device [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating existing volume attachment record: 8622161e-e955-460d-8bcc-57913e56daaa {{(pid=69475) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1363.626758] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Volume attach. 
Driver type: vmdk {{(pid=69475) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1363.627140] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701173', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'name': 'volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'serial': '592e1ea7-35fe-4819-9f91-50f12b0c11bd'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1363.628049] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e93cf3cb-5303-4263-98c5-e63b60ec163f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.644359] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e50a884-1fe0-42aa-aa95-833d9edd7e32 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.670557] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd/volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1363.670792] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5780ae92-65d2-4da8-8ca3-c838dc440304 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.687985] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1363.687985] env[69475]: value = "task-3509303" [ 1363.687985] env[69475]: _type = "Task" [ 1363.687985] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1363.695229] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509303, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.197985] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509303, 'name': ReconfigVM_Task, 'duration_secs': 0.330584} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.198279] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfigured VM instance instance-00000080 to attach disk [datastore1] volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd/volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd.vmdk or device None with type thin {{(pid=69475) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1364.203103] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da92c92e-1eae-4686-8716-d8e9d60897f7 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1364.217941] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1364.217941] env[69475]: value = "task-3509304" [ 1364.217941] env[69475]: _type = "Task" [ 1364.217941] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1364.225387] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509304, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1364.727471] env[69475]: DEBUG oslo_vmware.api [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509304, 'name': ReconfigVM_Task, 'duration_secs': 0.134814} completed successfully. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1364.727811] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701173', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'name': 'volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'serial': '592e1ea7-35fe-4819-9f91-50f12b0c11bd'} {{(pid=69475) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1365.762457] env[69475]: DEBUG nova.objects.instance [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1366.268848] env[69475]: DEBUG oslo_concurrency.lockutils [None req-e108cb44-9a7e-491c-ab2a-b5efb2840fac tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.237s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1366.552287] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1366.552486] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1367.055406] env[69475]: INFO nova.compute.manager [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Detaching volume 3983ee6b-c844-4c71-b5b5-370ff456e63b [ 1367.084579] env[69475]: INFO nova.virt.block_device [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Attempting to driver detach volume 3983ee6b-c844-4c71-b5b5-370ff456e63b from mountpoint /dev/sdb [ 1367.084838] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Volume 
detach. Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1367.085038] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701172', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'name': 'volume-3983ee6b-c844-4c71-b5b5-370ff456e63b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'serial': '3983ee6b-c844-4c71-b5b5-370ff456e63b'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1367.085898] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b895981d-40b8-4b93-8a57-80fed7ab16c6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.110245] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf1c6e2-4bd3-43cc-84e1-b47e95e4e259 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.117281] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77a280a-be43-4802-822d-426b5f247533 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.139402] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-791292fa-feec-4adc-b15f-e1442abf74e5 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.153119] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] The volume has not been displaced from its original location: [datastore1] volume-3983ee6b-c844-4c71-b5b5-370ff456e63b/volume-3983ee6b-c844-4c71-b5b5-370ff456e63b.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1367.158282] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfiguring VM instance instance-00000080 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1367.158520] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b9a59834-f5a9-46d7-8a19-83b49f00b756 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.174941] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1367.174941] env[69475]: value = "task-3509305" [ 1367.174941] env[69475]: _type = "Task" [ 1367.174941] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.182188] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.684320] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509305, 'name': ReconfigVM_Task, 'duration_secs': 0.211932} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.684536] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfigured VM instance instance-00000080 to detach disk 2001 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1367.689120] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f1e9d97-26eb-4748-aa61-0f46173d0394 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1367.703828] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1367.703828] env[69475]: value = "task-3509306" [ 1367.703828] env[69475]: _type = "Task" [ 1367.703828] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1367.716924] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509306, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1368.213032] env[69475]: DEBUG oslo_vmware.api [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509306, 'name': ReconfigVM_Task, 'duration_secs': 0.127389} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1368.213340] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701172', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'name': 'volume-3983ee6b-c844-4c71-b5b5-370ff456e63b', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '3983ee6b-c844-4c71-b5b5-370ff456e63b', 'serial': '3983ee6b-c844-4c71-b5b5-370ff456e63b'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1368.752448] env[69475]: DEBUG nova.objects.instance [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1369.761513] env[69475]: DEBUG oslo_concurrency.lockutils [None req-45e61756-bf3f-4723-84ed-904458fe078f tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.209s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1369.784428] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1369.784680] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1370.288340] env[69475]: INFO nova.compute.manager [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Detaching volume 592e1ea7-35fe-4819-9f91-50f12b0c11bd [ 1370.317097] env[69475]: INFO nova.virt.block_device [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 
2a07fd42-da76-4661-9563-d52afe52f308] Attempting to driver detach volume 592e1ea7-35fe-4819-9f91-50f12b0c11bd from mountpoint /dev/sdc [ 1370.317341] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Volume detach. Driver type: vmdk {{(pid=69475) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1370.317528] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701173', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'name': 'volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'serial': '592e1ea7-35fe-4819-9f91-50f12b0c11bd'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1370.318424] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8889ec53-1dbc-453f-bd3c-a62e03eadf1f {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.339514] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60c2859-ac90-4f59-bc5a-4df6b326905a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.345815] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c98e95f-56eb-4521-a607-20034b171439 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.366064] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7360d2cb-c0f9-47da-8461-608af5b6397d {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.381097] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] The volume has not been displaced from its original location: [datastore1] volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd/volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd.vmdk. No consolidation needed. 
{{(pid=69475) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1370.386220] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfiguring VM instance instance-00000080 to detach disk 2002 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1370.386465] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f621103-712d-46c2-a489-b1cc0edb7849 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.404635] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1370.404635] env[69475]: value = "task-3509307" [ 1370.404635] env[69475]: _type = "Task" [ 1370.404635] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.413652] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509307, 'name': ReconfigVM_Task} progress is 6%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1370.913719] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509307, 'name': ReconfigVM_Task, 'duration_secs': 0.229716} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1370.914096] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Reconfigured VM instance instance-00000080 to detach disk 2002 {{(pid=69475) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1370.918683] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-731dc0e2-12bd-45b4-a3cb-5b6b6b29b88e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1370.932642] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1370.932642] env[69475]: value = "task-3509308" [ 1370.932642] env[69475]: _type = "Task" [ 1370.932642] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1370.940141] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509308, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1371.442197] env[69475]: DEBUG oslo_vmware.api [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509308, 'name': ReconfigVM_Task, 'duration_secs': 0.12623} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1371.442492] env[69475]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-701173', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'name': 'volume-592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a07fd42-da76-4661-9563-d52afe52f308', 'attached_at': '', 'detached_at': '', 'volume_id': '592e1ea7-35fe-4819-9f91-50f12b0c11bd', 'serial': '592e1ea7-35fe-4819-9f91-50f12b0c11bd'} {{(pid=69475) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1371.983436] env[69475]: DEBUG nova.objects.instance [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'flavor' on Instance uuid 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1372.992688] env[69475]: DEBUG oslo_concurrency.lockutils [None req-eb0179cb-f2a2-49bd-a1bf-70a194cf4d7e tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.208s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.149916] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.150320] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.150450] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "2a07fd42-da76-4661-9563-d52afe52f308-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1374.150627] env[69475]: DEBUG 
oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1374.150790] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1374.153046] env[69475]: INFO nova.compute.manager [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Terminating instance [ 1374.657571] env[69475]: DEBUG nova.compute.manager [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Start destroying the instance on the hypervisor. {{(pid=69475) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1374.657874] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Destroying instance {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1374.658793] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c583dcbf-431b-49a0-8904-84dd7f0dc7ab {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.666865] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Powering off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1374.667107] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c5ee18f-b1a1-4242-a74d-ab2a95b2e892 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1374.672760] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1374.672760] env[69475]: value = "task-3509309" [ 1374.672760] env[69475]: _type = "Task" [ 1374.672760] env[69475]: } to complete. {{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1374.680820] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509309, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.181462] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509309, 'name': PowerOffVM_Task, 'duration_secs': 0.158439} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.181842] env[69475]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Powered off the VM {{(pid=69475) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1375.181895] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Unregistering the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1375.182112] env[69475]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-430cfd6f-898e-401b-9254-e962193924a6 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.249828] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Unregistered the VM {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1375.250080] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Deleting contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1375.250281] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleting the datastore file [datastore2] 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1375.250537] env[69475]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9c8cdde-3a9b-44ec-ab55-24416a5231fd {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1375.258555] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for the task: (returnval){ [ 1375.258555] env[69475]: value = "task-3509311" [ 1375.258555] env[69475]: _type = "Task" [ 1375.258555] env[69475]: } to complete. 
{{(pid=69475) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1375.267562] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509311, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1375.768870] env[69475]: DEBUG oslo_vmware.api [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Task: {'id': task-3509311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13761} completed successfully. {{(pid=69475) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1375.769196] env[69475]: DEBUG nova.virt.vmwareapi.ds_util [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleted the datastore file {{(pid=69475) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1375.769481] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Deleted contents of the VM from datastore datastore2 {{(pid=69475) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1375.769767] env[69475]: DEBUG nova.virt.vmwareapi.vmops [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Instance destroyed {{(pid=69475) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1375.770065] env[69475]: INFO nova.compute.manager [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1375.770375] env[69475]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=69475) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1375.770573] env[69475]: DEBUG nova.compute.manager [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Deallocating network for instance {{(pid=69475) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1375.770668] env[69475]: DEBUG nova.network.neutron [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] deallocate_for_instance() {{(pid=69475) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1376.208023] env[69475]: DEBUG nova.compute.manager [req-755f84cc-cf49-4291-a530-9c3dc4781a1d req-4d9477c9-5ae8-46ce-b037-d01789e2ba28 service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Received event network-vif-deleted-9906eb52-bc0b-4c1b-88a5-119b7d5794da {{(pid=69475) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1376.208023] env[69475]: INFO nova.compute.manager [req-755f84cc-cf49-4291-a530-9c3dc4781a1d req-4d9477c9-5ae8-46ce-b037-d01789e2ba28 service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Neutron deleted interface 9906eb52-bc0b-4c1b-88a5-119b7d5794da; detaching it from the instance and deleting it from the info cache [ 1376.208532] env[69475]: DEBUG nova.network.neutron [req-755f84cc-cf49-4291-a530-9c3dc4781a1d req-4d9477c9-5ae8-46ce-b037-d01789e2ba28 service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.691476] env[69475]: DEBUG nova.network.neutron [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Updating instance_info_cache with network_info: [] {{(pid=69475) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.711041] env[69475]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8815440-f484-454e-9f4a-8adc84a74a1b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.722434] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d43c467-28ad-4f59-8575-be53d5ea098b {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.747690] env[69475]: DEBUG nova.compute.manager [req-755f84cc-cf49-4291-a530-9c3dc4781a1d req-4d9477c9-5ae8-46ce-b037-d01789e2ba28 service nova] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Detach interface failed, port_id=9906eb52-bc0b-4c1b-88a5-119b7d5794da, reason: Instance 2a07fd42-da76-4661-9563-d52afe52f308 could not be found. {{(pid=69475) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1377.194585] env[69475]: INFO nova.compute.manager [-] [instance: 2a07fd42-da76-4661-9563-d52afe52f308] Took 1.42 seconds to deallocate network for instance. 
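
Note on the locking pattern recorded above: the attach/detach and resource-tracker entries all show oslo.concurrency acquiring a named semaphore (the instance UUID, or "compute_resources") around the critical section, which is what produces the paired 'Acquiring lock ... / Lock ... acquired / Lock ... "released"' DEBUG lines from lockutils.py. A minimal sketch of that pattern follows, assuming oslo.concurrency is installed; the decorated function is illustrative, not Nova's actual code, and only the lock name (the instance UUID) is copied from the log.

    from oslo_concurrency import lockutils

    # Serialize all volume operations for one instance on a named semaphore,
    # similar to how the log shows do_attach_volume/do_detach_volume holding a
    # lock named after the instance UUID (UUID below copied from the log).
    @lockutils.synchronized('2a07fd42-da76-4661-9563-d52afe52f308')
    def do_volume_operation(op):
        # Only one attach/detach runs at a time for this instance; lockutils
        # logs the Acquiring/acquired/released DEBUG lines seen above.
        print(f'{op} volume under the instance lock')

    if __name__ == '__main__':
        do_volume_operation('attach')
        do_volume_operation('detach')

The same helper backs the "compute_resources" lock that the resource tracker held for 2.114s during the periodic update_available_resource run earlier in this section.
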
[ 1377.701374] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1377.701693] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1377.701816] env[69475]: DEBUG nova.objects.instance [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lazy-loading 'resources' on Instance uuid 2a07fd42-da76-4661-9563-d52afe52f308 {{(pid=69475) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1378.234261] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8495c167-c880-4558-99a5-42da9d96741a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.241694] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ec0b5cd-9c30-4988-a96a-21649022e01e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.271887] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700735d4-62b8-44be-8803-3ee7b2b282a2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.278505] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8641634e-7ba6-4748-bb99-27458c71540a {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.291230] env[69475]: DEBUG nova.compute.provider_tree [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1378.815683] env[69475]: ERROR nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] [req-70fe26c0-a3e2-491d-b3ad-500a415efdf0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 
0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID dd221100-68c1-4a75-92b5-b24d81fee5da. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-70fe26c0-a3e2-491d-b3ad-500a415efdf0"}]} [ 1378.831345] env[69475]: DEBUG nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Refreshing inventories for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1378.843860] env[69475]: DEBUG nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating ProviderTree inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1378.844095] env[69475]: DEBUG nova.compute.provider_tree [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 89, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1378.854416] env[69475]: DEBUG nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Refreshing aggregate associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, aggregates: None {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1378.870728] env[69475]: DEBUG nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Refreshing trait associations for resource provider dd221100-68c1-4a75-92b5-b24d81fee5da, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_NET_ATTACH_INTERFACE {{(pid=69475) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1378.891776] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1243d11b-acff-4c94-aaad-45de17685125 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.898598] env[69475]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1385db43-a002-4348-854d-3e95117484f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.927143] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36be133a-1353-4a7b-b577-8eba58948628 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.933563] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709138eb-f7b2-4d70-9a75-32f2feeedae0 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.947135] env[69475]: DEBUG nova.compute.provider_tree [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.085098] env[69475]: DEBUG nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updated inventory for provider dd221100-68c1-4a75-92b5-b24d81fee5da with generation 185 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1380.085409] env[69475]: DEBUG nova.compute.provider_tree [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating resource provider dd221100-68c1-4a75-92b5-b24d81fee5da generation from 185 to 186 during operation: update_inventory {{(pid=69475) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1380.085555] env[69475]: DEBUG nova.compute.provider_tree [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Updating inventory in ProviderTree for provider dd221100-68c1-4a75-92b5-b24d81fee5da with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1380.590416] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.889s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1380.610258] env[69475]: INFO nova.scheduler.client.report [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Deleted allocations for instance 2a07fd42-da76-4661-9563-d52afe52f308 [ 1381.117943] env[69475]: DEBUG oslo_concurrency.lockutils [None req-4d3d649f-990e-423c-bafe-19f00ae379d5 tempest-AttachVolumeTestJSON-641423761 tempest-AttachVolumeTestJSON-641423761-project-member] Lock "2a07fd42-da76-4661-9563-d52afe52f308" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.968s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.550230] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1389.550624] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.055639] env[69475]: DEBUG nova.compute.manager [None
req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=69475) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1390.055639] env[69475]: DEBUG oslo_service.periodic_task [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Running periodic task ComputeManager.update_available_resource {{(pid=69475) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1390.558701] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.559018] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.559149] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1390.559302] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=69475) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:937}} [ 1390.560171] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edda46d6-9eb4-40b4-8bcd-8e004a3d4c34 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.568300] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87fe97e-c994-41fc-8853-12a7db8bf3e8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.581689] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e69b24b-bdda-40ff-9264-49fb78e968cf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.587892] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6720ce23-d974-44b6-93f6-1865decb4c7e {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1390.616540] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180671MB free_disk=90GB free_vcpus=48 pci_devices=None {{(pid=69475) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1136}} [ 1390.616691] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.616907] env[69475]: DEBUG oslo_concurrency.lockutils [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1391.635688] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1159}} [ 1391.635929] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=69475) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1168}} [ 1391.651245] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d06f3567-7854-4858-aeef-aa6ac306cbbf {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.658792] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d6c2578-8c73-4bc9-b469-24daf62ee5f8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.687981] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e3a64d-142d-4fa9-bc81-18d4186afca8 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.695316] env[69475]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd132a46-03b3-444c-9643-d239697203b2 {{(pid=69475) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1391.709690] env[69475]: DEBUG nova.compute.provider_tree [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed in ProviderTree for provider: dd221100-68c1-4a75-92b5-b24d81fee5da {{(pid=69475) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.213128] env[69475]: DEBUG nova.scheduler.client.report [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Inventory has not changed for provider dd221100-68c1-4a75-92b5-b24d81fee5da based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 90, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=69475) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1392.718886] env[69475]: DEBUG nova.compute.resource_tracker [None req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=69475) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1097}} [ 1392.719224] env[69475]: DEBUG oslo_concurrency.lockutils [None 
req-279fd61e-8f68-4dd2-8a77-76c66bc415de None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.102s {{(pid=69475) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}